(no commit message)

This commit is contained in:
2026-02-22 12:42:09 -08:00
parent a5a55ca160
commit de6ded791f
10 changed files with 105 additions and 2 deletions

View File

@@ -1,2 +0,0 @@
# simple_repo_with_compile

4
auto_classes.json Normal file
View File

@@ -0,0 +1,4 @@
{
"AutoConfig": "program.program.ExampleConfig",
"AutoProgram": "program.program.ExampleProgram"
}

9
compile.py Normal file
View File

@@ -0,0 +1,9 @@
"""Compile the example program and push it (with its source code) to the hub.

Usage: python compile.py <username>
"""
import sys

from program.program import ExampleConfig, ExampleProgram

if __name__ == "__main__":
    # Fail with a clear usage message instead of an IndexError when the
    # required <username> argument is missing.
    if len(sys.argv) < 2:
        sys.exit("usage: python compile.py <username>")
    username = sys.argv[1]
    program = ExampleProgram(ExampleConfig(output_type="str"), runtime_param="hi")
    repo_path = f"{username}/simple_repo_with_compile"
    # with_code=True bundles the source files alongside the serialized program.
    program.push_to_hub(repo_path, with_code=True)

6
config.json Normal file
View File

@@ -0,0 +1,6 @@
{
"model": null,
"output_type": "str",
"lm": "openai/gpt-4o",
"number": 1
}

0
include_me_too.txt Normal file
View File

42
program.json Normal file
View File

@@ -0,0 +1,42 @@
{
"predictor": {
"traces": [],
"train": [],
"demos": [],
"signature": {
"instructions": "Given the fields `question`, `context`, produce the fields `answer`.",
"fields": [
{
"prefix": "Question:",
"description": "${question}"
},
{
"prefix": "Context:",
"description": "${context}"
},
{
"prefix": "Answer:",
"description": "Answer to the question, based on the passage"
}
]
},
"lm": {
"model": "openai/gpt-4o",
"model_type": "chat",
"cache": true,
"num_retries": 3,
"finetuning_model": null,
"launch_kwargs": {},
"train_kwargs": {},
"temperature": null,
"max_tokens": null
}
},
"metadata": {
"dependency_versions": {
"python": "3.11",
"dspy": "3.1.2",
"cloudpickle": "3.1"
}
}
}

0
program/__init__.py Normal file
View File

2
program/mod.py Normal file
View File

@@ -0,0 +1,2 @@
def function_from_mod() -> str:
    """Return the sentinel string ``"function_from_mod"``.

    Imported (unused) by ``program/program.py`` so the module is bundled
    when the repository code is pushed.
    """
    marker = "function_from_mod"
    return marker

31
program/program.py Normal file
View File

@@ -0,0 +1,31 @@
from typing import Literal
import dspy
from modaic import PrecompiledConfig, PrecompiledProgram
from .mod import function_from_mod # noqa: F401
class Summarize(dspy.Signature):
    """DSPy signature: given ``question`` and ``context``, produce ``answer``."""

    # Inputs: the user's question and the passage to answer it from.
    question = dspy.InputField()
    context = dspy.InputField()
    # Output: answer grounded in the provided passage.
    answer = dspy.OutputField(desc="Answer to the question, based on the passage")
class ExampleConfig(PrecompiledConfig):
    """Configuration for :class:`ExampleProgram`.

    The fields here mirror the keys seen in the repo's ``config.json``
    (``output_type``, ``lm``, ``number``) — presumably serialized by
    ``push_to_hub``; confirm against modaic's PrecompiledConfig docs.
    """

    # Whether the program should produce a boolean or a string answer.
    output_type: Literal["bool", "str"]
    # LM identifier handed to dspy.LM in ExampleProgram.__init__.
    lm: str = "openai/gpt-4o"
    # Example numeric setting; not read anywhere in this file.
    number: int = 1
class ExampleProgram(PrecompiledProgram):
    """Example precompiled program wrapping a single ``dspy.Predict`` step."""

    # Narrows the type of the config attribute inherited from PrecompiledProgram.
    config: ExampleConfig

    def __init__(self, config: ExampleConfig, runtime_param: str, **kwargs):
        """Build the predictor from *config*.

        Args:
            config: Validated settings; ``config.lm`` selects the language model.
            runtime_param: Runtime-only value kept on the instance; it does not
                appear in the serialized ``config.json``.
            **kwargs: Forwarded to ``PrecompiledProgram.__init__``.
        """
        super().__init__(config, **kwargs)
        self.predictor = dspy.Predict(Summarize)
        self.predictor.lm = dspy.LM(self.config.lm)
        self.runtime_param = runtime_param

    def forward(self, question: str, context: str) -> str:
        # NOTE(review): dspy.Predict calls usually return a Prediction object,
        # not a plain str — confirm the ``-> str`` annotation is intentional.
        return self.predictor(question=question, context=context)

11
pyproject.toml Normal file
View File

@@ -0,0 +1,11 @@
[project]
name = "simple_repo_with_compile"
version = "0.1.0"
description = "Example precompiled DSPy program repo with a compile-and-push script."
readme = "README.md"
requires-python = ">=3.10"
dependencies = ["dspy", "modaic"]
[tool.uv.sources]
modaic = { workspace = true }