add usage section

start_app.py CHANGED (+27 -4)
@@ -12,12 +12,33 @@ from huggingface_hub import HfApi, get_token
 
 
 CMD = ["python" ,"run_job.py"]
+ARG_NAMES = ["<src>", "<dst>", "<query>", "[-c config]", "[-s split]", "[-p private]"]
+
+CONTENT = """
+## Usage:
+
+```bash
+curl -L 'https://huggingface.co/api/jobs/<username>' \
+-H 'Content-Type: application/json' \
+-H 'Authorization: Bearer <hf_token>' \
+-d '{{
+    "spaceId": "{SPACE_ID}",
+    "command": {CMD},
+    "arguments": {ARG_NAMES},
+    "environment": {{"HF_TOKEN": <hf_token>}},
+    "flavor": "cpu-basic"
+}}'
+```
+
+## Example:
+"""
 
 with open("README.md") as f:
     METADATA = yaml.safe_load(f.read().split("---\n")[1])
 TITLE = METADATA["title"]
 EMOJI = METADATA["emoji"]
-
+SPACE_ID = os.environ.get("SPACE_ID") or "lhoestq/run-duckdb"
+
 
 try:
     process = subprocess.run(CMD + ["--help"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
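The doubled braces in CONTENT (`{{` / `}}`) survive `str.format` as literal JSON braces, while `{SPACE_ID}`, `{CMD}` and `{ARG_NAMES}` are substituted later, when the UI calls `CONTENT.format(..., CMD=json.dumps(CMD), ARG_NAMES=json.dumps(ARG_NAMES))` (see the gr.Markdown hunk further down). A minimal sketch of that rendering step, using a trimmed-down stand-in for the full CONTENT string:

```python
import json
import os

# Same values as defined in this diff
CMD = ["python", "run_job.py"]
ARG_NAMES = ["<src>", "<dst>", "<query>", "[-c config]", "[-s split]", "[-p private]"]
SPACE_ID = os.environ.get("SPACE_ID") or "lhoestq/run-duckdb"

# Trimmed-down stand-in for CONTENT: {{ }} become literal braces after .format(),
# single-braced names are substituted.
template = """-d '{{"spaceId": "{SPACE_ID}", "command": {CMD}, "arguments": {ARG_NAMES}}}'"""

rendered = template.format(
    SPACE_ID=SPACE_ID,
    CMD=json.dumps(CMD),              # json.dumps turns the Python lists into valid JSON arrays
    ARG_NAMES=json.dumps(ARG_NAMES),
)
print(rendered)  # prints the -d payload with the real values substituted
```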
@@ -75,7 +96,7 @@ def run(src, config, split, dst, query, oauth_token: gr.OAuthToken | None, profi
     resp = requests.post(
         f"https://huggingface.co/api/jobs/{username}",
         json={
-            "spaceId":
+            "spaceId": SPACE_ID,
             "arguments": args,
             "command": CMD,
             "environment": {"HF_TOKEN": token},
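For context, here is a self-contained sketch of the job submission this hunk belongs to. The endpoint, JSON keys and CMD come from the diff; the Authorization header and the `flavor` value are not visible in this hunk and are assumed here to match the curl template above, so treat it as illustrative rather than a definitive description of the Jobs API.

```python
import requests

def submit_job(username: str, token: str, args: list[str]) -> requests.Response:
    """Sketch of the POST made by the `run` callback (helper name and defaults are assumed)."""
    resp = requests.post(
        f"https://huggingface.co/api/jobs/{username}",
        json={
            "spaceId": "lhoestq/run-duckdb",      # SPACE_ID in start_app.py
            "command": ["python", "run_job.py"],  # CMD in start_app.py
            "arguments": args,                    # e.g. ["<src>", "<dst>", "<query>"]
            "environment": {"HF_TOKEN": token},
            "flavor": "cpu-basic",                # taken from the usage template; assumption
        },
        headers={"Authorization": f"Bearer {token}"},  # matches the curl template; assumption
    )
    resp.raise_for_status()
    return resp
```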
@@ -125,6 +146,7 @@ with gr.Blocks() as demo:
     gr.Markdown(f"# {TITLE} {EMOJI}")
     with gr.Column():
         gr.LoginButton()
+        gr.Markdown(CONTENT.format(SPACE_ID=SPACE_ID, CMD=json.dumps(CMD), ARG_NAMES=json.dumps(ARG_NAMES)))
     with gr.Row():
         with gr.Column(scale=10):
             with gr.Row():
@@ -197,7 +219,7 @@ if HELP:
 
 with demo.route("Jobs", "/jobs") as page:
     gr.Markdown("# Jobs")
-    jobs_dataframe = gr.DataFrame()
+    jobs_dataframe = gr.DataFrame(datatype="markdown")
 
     @page.load(outputs=[jobs_dataframe])
     def list_jobs(oauth_token: gr.OAuthToken | None, profile: gr.OAuthProfile | None):
|
|
| 217 |
"id": job["metadata"]["id"],
|
| 218 |
"created_at": job["metadata"]["created_at"],
|
| 219 |
"stage": job["compute"]["status"]["stage"],
|
|
|
|
| 220 |
"command": str(job["compute"]["spec"]["extra"]["command"]),
|
| 221 |
"args": str(job["compute"]["spec"]["extra"]["args"]),
|
| 222 |
}
|
| 223 |
for job in resp.json()
|
| 224 |
-
if job["compute"]["spec"]["extra"]["input"]["spaceId"] ==
|
| 225 |
])
|
| 226 |
|
| 227 |
if __name__ == "__main__":
|
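The `datatype="markdown"` DataFrame is what makes the new `output` column render as a clickable logs link. As a consolidated view of what `list_jobs` now builds, here is a sketch; the GET request and the pandas return type are assumptions (they sit outside the visible hunks), while the row layout and the SPACE_ID filter mirror the diff.

```python
import pandas as pd
import requests

def list_space_jobs(username: str, token: str, space_id: str) -> pd.DataFrame:
    """Sketch of the Jobs-page table; the endpoint and auth used for the GET are assumed."""
    resp = requests.get(
        f"https://huggingface.co/api/jobs/{username}",
        headers={"Authorization": f"Bearer {token}"},
    )
    resp.raise_for_status()
    return pd.DataFrame([
        {
            "id": job["metadata"]["id"],
            "created_at": job["metadata"]["created_at"],
            "stage": job["compute"]["status"]["stage"],
            # rendered as a link because the gr.DataFrame uses datatype="markdown"
            "output": f'[logs](https://huggingface.co/api/jobs/{username}/{job["metadata"]["id"]}/logs-stream)',
            "command": str(job["compute"]["spec"]["extra"]["command"]),
            "args": str(job["compute"]["spec"]["extra"]["args"]),
        }
        for job in resp.json()
        # only keep jobs launched from this Space
        if job["compute"]["spec"]["extra"]["input"]["spaceId"] == space_id
    ])
```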