WithTravis committed on
Commit
5e6de6d
·
1 Parent(s): 1634cb1

Downgrade python version: 3.12 -> 3.11

Browse files
Files changed (2) hide show
  1. Dockerfile +1 -4
  2. app.py +6 -6
Dockerfile CHANGED
@@ -1,7 +1,4 @@
1
- # Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
2
- # you will also find guides on how best to write your Dockerfile
3
-
4
- FROM python:3.12
5
 
6
  RUN useradd -m -u 1000 user
7
  USER user
 
1
+ FROM python:3.11
 
 
 
2
 
3
  RUN useradd -m -u 1000 user
4
  USER user
app.py CHANGED
@@ -1,4 +1,4 @@
1
- from fastapi import FastAPI
2
  import solara
3
  import random
4
  import torch
@@ -6,12 +6,12 @@ import torch.nn.functional as F
6
  import pandas as pd
7
  from transformers import AutoTokenizer, AutoModelForCausalLM
8
 
9
- app = FastAPI()
 
 
 
 
10
 
11
- @app.get("/")
12
- def greet_json():
13
- return {"Hello": "World!"}
14
-
15
  tokenizer = AutoTokenizer.from_pretrained('gemma-3-1b-it-qat-q4_0-gguf')
16
  model = AutoModelForCausalLM.from_pretrained('gemma-3-1b-it-qat-q4_0-gguf')
17
  text1 = solara.reactive("Never gonna give you up, never gonna let you")
 
1
+ #from fastapi import FastAPI
2
  import solara
3
  import random
4
  import torch
 
6
  import pandas as pd
7
  from transformers import AutoTokenizer, AutoModelForCausalLM
8
 
9
+ #app = FastAPI()
10
+
11
+ #@app.get("/")
12
+ #def greet_json():
13
+ #return {"Hello": "World!"}
14
 
 
 
 
 
15
  tokenizer = AutoTokenizer.from_pretrained('gemma-3-1b-it-qat-q4_0-gguf')
16
  model = AutoModelForCausalLM.from_pretrained('gemma-3-1b-it-qat-q4_0-gguf')
17
  text1 = solara.reactive("Never gonna give you up, never gonna let you")