ejschwartz committed on
Commit
e491165
1 Parent(s): 2a4ecfb
Files changed (4)
  1. Dockerfile +15 -6
  2. README.md +38 -6
  3. main.py +86 -27
  4. requirements.txt +1 -5
Dockerfile CHANGED
@@ -1,12 +1,18 @@
-# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
-# you will also find guides on how best to write your Dockerfile
+# Build on top of the psychec type inference Docker image
+FROM ghcr.io/edmcman/psychec-typeinference-docker:original
 
-FROM python:3.12
+# Install Python and pip
+RUN apt-get update && apt-get install -y \
+    python3 \
+    python3-pip \
+    python3-venv \
+    && rm -rf /var/lib/apt/lists/*
 
-WORKDIR /code
+# Create a virtual environment and install dependencies
+RUN python3 -m venv /opt/venv
+ENV PATH="/opt/venv/bin:$PATH"
 
 COPY ./requirements.txt /code/requirements.txt
-
 RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
 
 # Set up a new user named "user" with user ID 1000
@@ -17,7 +23,7 @@ USER user
 
 # Set home to the user's home directory
 ENV HOME=/home/user \
-    PATH=/home/user/.local/bin:$PATH
+    PATH=/home/user/.local/bin:/opt/venv/bin:$PATH
 
 # Set the working directory to the user's home directory
 WORKDIR $HOME/app
@@ -28,4 +34,7 @@ COPY --chown=user . $HOME/app
 # Disable buffering to allow logging of standard output.
 ENV PYTHONUNBUFFERED=1
 
+# Expose the port Gradio runs on
+EXPOSE 7860
+
 CMD ["python", "main.py"]
README.md CHANGED
@@ -1,11 +1,43 @@
 ---
-title: Demo Docker Gradio
-emoji: 📈
-colorFrom: indigo
-colorTo: indigo
+title: PsycheC Type Inference
+emoji: 🔍
+colorFrom: blue
+colorTo: purple
 sdk: docker
 pinned: false
-license: apache-2.0
+license: lgpl-2.1
 ---
 
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# PsycheC Type Inference
+
+This Space provides a web interface to [PsycheC](https://github.com/ltcmelo/psychec), a compiler frontend for the C programming language that can infer missing type declarations.
+
+## What it does
+
+PsycheC analyzes C source code and:
+- Infers types for undeclared identifiers
+- Generates a header file (`_gen.h`) with the inferred type declarations
+- Produces a fixed C file (`_fixed.c`) that includes the generated header
+
+## Usage
+
+1. Paste your C code in the input text area
+2. Click "Run Type Inference"
+3. View the generated header and fixed C code in the output tabs
+
+## Example
+
+Input:
+```c
+int main() {
+    x = 10;
+    y = foo(x);
+    return y;
+}
+```
+
+The tool will infer the types of `x`, `y`, and `foo` and generate appropriate declarations.
+
+## Credits
+
+Based on [psychec-typeinference-docker](https://github.com/edmcman/psychec-typeinference-docker), a Docker image wrapper for the PsycheC type inference functionality.
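Beyond the web UI described in the README, a Gradio Space can also be called programmatically. A minimal sketch using `gradio_client`, assuming the placeholder Space id below is replaced with the real one and that Gradio derives the endpoint name from the `run_psychec` handler in `main.py` (its default behavior):

```python
# Hypothetical client-side usage, not part of the commit.
from gradio_client import Client

client = Client("<user>/<space-name>")  # placeholder Space id

c_code = """int main() {
    x = 10;
    y = foo(x);
    return y;
}"""

# The click handler returns three outputs: log, generated header, fixed C file.
log, header, fixed = client.predict(c_code, api_name="/run_psychec")
print(header)
```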
main.py CHANGED
@@ -1,33 +1,92 @@
 import gradio as gr
-import torch
-import requests
-from torchvision import transforms
-import frontmatter
+import subprocess
+import tempfile
+import os
+from pathlib import Path
 
-description = frontmatter.load("README.md").content
-model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
-response = requests.get("https://git.io/JJkYN")
-labels = response.text.split("\n")
+EXAMPLE_CODE = """int main() {
+    x = 10;
+    y = foo(x);
+    return y;
+}"""
 
+def run_psychec(c_code: str) -> tuple[str, str, str]:
+    """Run PsycheC type inference on the provided C code."""
+    if not c_code.strip():
+        return "Please provide some C code.", "", ""
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        input_file = Path(tmpdir) / "input.c"
+        input_file.write_text(c_code)
+
+        try:
+            result = subprocess.run(
+                ["python3", "/workspace/psychec/reconstruct.py", str(input_file)],
+                capture_output=True,
+                text=True,
+                timeout=30,
+                cwd="/workspace/psychec"
+            )
+
+            gen_header = Path(tmpdir) / "input_gen.h"
+            fixed_c = Path(tmpdir) / "input_fixed.c"
+
+            header_content = gen_header.read_text() if gen_header.exists() else "No header generated"
+            fixed_content = fixed_c.read_text() if fixed_c.exists() else "No fixed file generated"
+
+            log_output = result.stdout + ("\n" + result.stderr if result.stderr else "")
+            if not log_output.strip():
+                log_output = "Type inference completed successfully." if result.returncode == 0 else "Type inference failed."
+
+            return log_output, header_content, fixed_content
+
+        except subprocess.TimeoutExpired:
+            return "Error: Type inference timed out after 30 seconds.", "", ""
+        except Exception as e:
+            return f"Error: {e}", "", ""
 
-def predict(inp):
-    inp = transforms.ToTensor()(inp).unsqueeze(0)
-    with torch.no_grad():
-        prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
-        confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
-    return confidences
-
-
-def run():
-    demo = gr.Interface(
-        fn=predict,
-        inputs=gr.Image(type="pil"),
-        outputs=gr.Label(num_top_classes=3),
-        description=description
-    )
-
-    demo.launch(server_name="0.0.0.0", server_port=7860)
-
+def create_demo():
+    with gr.Blocks(title="PsycheC Type Inference") as demo:
+        gr.Markdown("# 🔍 PsycheC Type Inference")
+        gr.Markdown(
+            "Analyze C code and infer missing type declarations. "
+            "PsycheC will generate a header file with inferred types and a fixed C file."
+        )
+
+        with gr.Row():
+            with gr.Column():
+                code_input = gr.Code(
+                    label="C Source Code",
+                    language="c",
+                    value=EXAMPLE_CODE,
+                    lines=15
+                )
+                run_btn = gr.Button("Run Type Inference", variant="primary")
+
+            with gr.Column():
+                with gr.Tabs():
+                    with gr.Tab("Log"):
+                        log_output = gr.Textbox(label="Output Log", lines=10)
+                    with gr.Tab("Generated Header"):
+                        header_output = gr.Code(label="Generated Header (_gen.h)", language="c", lines=10)
+                    with gr.Tab("Fixed C File"):
+                        fixed_output = gr.Code(label="Fixed C File (_fixed.c)", language="c", lines=10)
+
+        run_btn.click(
+            fn=run_psychec,
+            inputs=[code_input],
+            outputs=[log_output, header_output, fixed_output]
+        )
+
+        gr.Markdown(
+            "---\n"
+            "Based on [PsycheC](https://github.com/ltcmelo/psychec) - "
+            "A compiler frontend for C with type inference capabilities.\n\n"
+            "Docker image: [psychec-typeinference-docker](https://github.com/edmcman/psychec-typeinference-docker)"
+        )
+
+    return demo
 
 if __name__ == "__main__":
-    run()
+    demo = create_demo()
+    demo.launch(server_name="0.0.0.0", server_port=7860)
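Because `run_psychec` is a plain function, the handler can be exercised without the Gradio UI. A minimal, hypothetical smoke test, assuming the module is importable as `main` and that it runs inside the container (where `/workspace/psychec/reconstruct.py` exists):

```python
# Hypothetical smoke test, not part of the commit.
from main import EXAMPLE_CODE, run_psychec

log, header, fixed = run_psychec(EXAMPLE_CODE)
print("=== log ===")
print(log)
print("=== generated header (_gen.h) ===")
print(header)
print("=== fixed C file (_fixed.c) ===")
print(fixed)
```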
requirements.txt CHANGED
@@ -1,5 +1 @@
-gradio
-torch
-torchvision
-requests
-python-frontmatter
+gradio