Commit 90c9489 (parent: ff079ce): initial commit
gen.py
CHANGED
@@ -2,8 +2,6 @@ import torch
 import sys
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 import json
-import jsonschema
-from jsonschema import validate, ValidationError
 
 tokenizer = AutoTokenizer.from_pretrained('google/gemma-2-2b-it')
 
@@ -182,6 +180,7 @@ def generate(event):
 
 
     output_text = tokenizer.decode(tokens[0], skip_special_tokens=False)
+    print(output_text)
     user_prompt_length = len(f"<bos><start_of_turn>user\n{prompt}\n{event}<end_of_turn>\n<start_of_turn>model\n")
 
     json_start_index = output_text.find("<json>")
@@ -201,12 +200,11 @@ def generate(event):
     if last_brace_index != -1:
         json_string = json_string[:last_brace_index + 1]
 
-
-    # Validate JSON
+    # Load JSON without validation
     try:
-
-        return
-    except
+        data = json.loads(json_string)
+        return data  # Return the parsed JSON data
+    except json.JSONDecodeError as e:
        return f"Error: Invalid JSON - {e}"
 
     else:
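For context: after this commit, the JSON-handling tail of generate() finds the <json> marker in the decoded output, trims the candidate string to its last closing brace, and parses it with json.loads instead of validating it against a jsonschema schema. The sketch below is a minimal standalone version of that parsing path; only the lines visible in the diff are confirmed, and the helper name extract_json, the </json> end-marker handling, and the surrounding fallback error strings are assumptions for illustration.

import json

def extract_json(output_text: str):
    # Hypothetical helper, not part of gen.py: isolates the model's
    # <json> ... </json> block. output_text.find("<json>") does appear
    # in the diff; the end-marker handling here is assumed.
    json_start_index = output_text.find("<json>")
    if json_start_index == -1:
        return "Error: no <json> block found"
    json_end_index = output_text.find("</json>", json_start_index)
    json_string = output_text[json_start_index + len("<json>"):
                              json_end_index if json_end_index != -1 else None]

    # Trim anything after the last closing brace, as in the diff's context lines.
    last_brace_index = json_string.rfind("}")
    if last_brace_index != -1:
        json_string = json_string[:last_brace_index + 1]

    # Load JSON without validation (the behaviour this commit introduces).
    try:
        data = json.loads(json_string)
        return data  # Return the parsed JSON data
    except json.JSONDecodeError as e:
        return f"Error: Invalid JSON - {e}"

Note that with the jsonschema validation removed, any syntactically valid JSON is returned unchanged; if schema conformance still matters downstream, a validate() call against the expected schema would have to be reinstated after json.loads.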