Update codegen.py
codegen.py CHANGED (+7 -8)
@@ -1,23 +1,22 @@
+
 import transformers
 from transformers import pipeline
 
 def generate(idea):
-    """
+    """
+    Generates code based on a given idea using the bigscience/T0_3B model.
 
     Args:
-        idea: The idea for the code to be generated.
+        idea (str): The idea for the code to be generated.
 
-
-    The generated code
+    Returns:
+        str: The generated code.
     """
-
     # Load the code generation model
     model_name = "bigscience/T0_3B" # Use a model that works for code generation
     model = transformers.AutoModelForCausalLM.from_pretrained(model_name)
     tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
 
-    # Generate the code
-
     # Generate the code
     input_text = f"""
     # Idea: {idea}
@@ -43,4 +42,4 @@ def generate(idea):
 # Example usage
 idea = "Write a Python function to calculate the factorial of a number"
 code = generate(idea)
-print(code)
+print(code)
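The diff elides the body of the generation step (the file's lines 23-41 are not shown), so the full flow is not visible here. Below is a minimal sketch of how that step might look, assuming a standard tokenize, generate, decode sequence; the prompt wording and generation settings are illustrative, not taken from the Space. Note also that bigscience/T0_3B is a T5-based encoder-decoder model, so this sketch loads it with AutoModelForSeq2SeqLM rather than the AutoModelForCausalLM call shown in the diff; the actual file may differ.

# Hypothetical sketch of the elided generation step; not the actual contents of codegen.py.
import transformers

def generate(idea):
    """Generates code for a given idea (illustrative reimplementation)."""
    model_name = "bigscience/T0_3B"
    # T0_3B is T5-based (encoder-decoder), so a seq2seq head is assumed here.
    model = transformers.AutoModelForSeq2SeqLM.from_pretrained(model_name)
    tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)

    # Build the prompt from the idea (wording is an assumption).
    input_text = f"# Idea: {idea}\n# Python code:"
    inputs = tokenizer(input_text, return_tensors="pt")

    # Generate and decode the code (max_new_tokens is an illustrative setting).
    output_ids = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Example usage
print(generate("Write a Python function to calculate the factorial of a number"))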