add Python and JSON file export #218

Merged · 1 commit · Feb 23, 2023
19 changes: 16 additions & 3 deletions README.md
@@ -3,7 +3,7 @@
Codepod provides the interactive coding experience popularized by Jupyter, but
with scalability and production-readiness. Users can still incrementally build
up code by trying out a small code snippet each time, but they will not be
overwhelmed by the sheer number of code snippets as the project grows.

<div align="center"><h2>Try it online at <a href="https://app.codepod.io">https://app.codepod.io</a>!</h2></div>

@@ -42,18 +42,31 @@ node.js process inside the container do the compiling and hot-reloading.

To install docker-compose, follow the official [Docker documentation](https://docs.docker.com/compose/install/linux/).

## .env file

First, create a `dev/.env` file with the following content (leave as is or
change the values to whatever you want).

```properties
POSTGRES_USER=myusername
POSTGRES_PASSWORD=mypassword
POSTGRES_DB=mydbname
JWT_SECRET=mysupersecretjwttoken

GOOGLE_CLIENT_ID=<google oauth client id>

EXPORT_AWS_S3_REGION=us-west-1
EXPORT_AWS_S3_BUCKET=<YOUR_BUCKET_NAME>
EXPORT_AWS_S3_ACCESS_KEY_ID=<YOUR_ACCESS_KEY>
EXPORT_AWS_S3_SECRET_ACCESS_KEY=<YOUR_SECRET_ACCESS_KEY>
```

Optional:

- Leave `GOOGLE_CLIENT_ID` empty if you do not need Google OAuth login.
- The `EXPORT_AWS_S3_*` variables are used for file export. You can leave them empty if you do not use that feature.

## Start the stack

```bash
cd dev
```
1 change: 1 addition & 0 deletions api/package.json
@@ -14,6 +14,7 @@
"@prisma/client": "4.3.1",
"apollo-server": "^3.5.0",
"apollo-server-express": "3.10.2",
"aws-sdk": "^2.1320.0",
"bcryptjs": "^2.4.3",
"dockerode": "^3.3.1",
"google-auth-library": "^8.7.0",
3 changes: 3 additions & 0 deletions api/src/resolver.ts
@@ -1,6 +1,7 @@
import UserResolver from "./resolver_user";
import RepoResolver from "./resolver_repo";
import RuntimeResolver from "./resolver_runtime";
import ExportResolver from "./resolver_export";

export const resolvers = {
Query: {
@@ -10,10 +11,12 @@ export const resolvers = {
...UserResolver.Query,
...RepoResolver.Query,
...RuntimeResolver.Query,
...ExportResolver.Query,
},
Mutation: {
...UserResolver.Mutation,
...RepoResolver.Mutation,
...RuntimeResolver.Mutation,
...ExportResolver.Mutation,
},
};
175 changes: 175 additions & 0 deletions api/src/resolver_export.ts
@@ -0,0 +1,175 @@
import Prisma from "@prisma/client";

import AWS from "aws-sdk";
import { writeFile, readFile, unlink } from "fs/promises";

console.log("REGION", process.env.EXPORT_AWS_S3_REGION);

// Set your AWS region and credentials
AWS.config.update({
region: process.env.EXPORT_AWS_S3_REGION,
accessKeyId: process.env.EXPORT_AWS_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.EXPORT_AWS_S3_SECRET_ACCESS_KEY,
});

// Create a new S3 object
const s3 = new AWS.S3();

async function uploadToS3WithExpiration(filename: string, content: string) {
try {
await writeFile(filename, content);

// Set the S3 parameters
const params = {
Bucket: process.env.EXPORT_AWS_S3_BUCKET as string,
Key: filename,
Body: await readFile(filename),
};

// Upload the file to S3; object expiration is assumed to be handled by a bucket lifecycle rule
const { Location } = await s3.upload(params).promise();

// Delete the generated file
await unlink(filename);
return Location;
} catch (error) {
console.log("Error uploading file:", error);
// Rethrow so the failure reaches the GraphQL layer instead of resolving a
// non-null String! field with undefined.
throw error;
}
}
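
// Illustrative usage (not part of the PR; the filename and content below are
// hypothetical):
//   const url = await uploadToS3WithExpiration("export.json", "{}");
//   // `url` is the object Location returned by s3.upload().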

const { PrismaClient } = Prisma;

const prisma = new PrismaClient();

/**
 * Export the pods' raw data to a JSON file.
*/
async function exportJSON(_, { repoId }, { userId }) {
const repo = await prisma.repo.findFirst({
where: {
OR: [
{ id: repoId, public: true },
{ id: repoId, owner: { id: userId || "undefined" } },
{ id: repoId, collaborators: { some: { id: userId || "undefined" } } },
],
},
include: {
pods: {
include: {
children: true,
parent: true,
},
orderBy: {
index: "asc",
},
},
},
});
// now export repo to a file
if (!repo) throw Error("Repo does not exist.");
const filename = `${
repo.name || "Untitled"
}-${new Date().toISOString()}.json`;
const aws_url = await uploadToS3WithExpiration(
filename,
JSON.stringify({ name: repo.name, version: "v0.0.1", pods: repo.pods })
);
return aws_url;
}
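
// Illustrative shape of the exported JSON file (ids and fields abbreviated):
//   { "name": "<repo name>", "version": "v0.0.1",
//     "pods": [{ "id": "...", "type": "CODE", "content": "...", ... }] }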

interface Pod {
type: "CODE" | "DECK";
id: string;
children: string[];
content: string;
name: string;
}

function generate_dfs(pod: Pod, pods: Record<string, Pod>, level: number) {
const space = " ".repeat(level);
if (pod.type === "CODE")
return [
space + `# BEGIN POD ${pod.id}`,
space + `${pod.content}`,
space + `# END POD ${pod.id}`,
].join("\n");
else {
// this is a DECK
let ids = pod.children;
const children_content = ids
.map((id) => generate_dfs(pods[id], pods, level + 1))
.join("\n\n");
return [
space + `# BEGIN SCOPE ${pod.name} ${pod.id}`,
children_content,
space + `# END SCOPE ${pod.name} ${pod.id}`,
].join("\n");
}
}
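
// Illustrative output (not part of the PR). A DECK named "utils" holding one
// CODE pod with content `x = 1`, rendered at level 1, produces:
//
//   # BEGIN SCOPE utils deck1
//     # BEGIN POD pod1
//     x = 1
//     # END POD pod1
//   # END SCOPE utils deck1
//
// ("deck1" and "pod1" stand in for real pod ids.)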

function pods_list2dict(pods) {
// build an id => pod map
let d = {};
for (const pod of pods) {
d[pod.id] = pod;
pod.children = [];
pod.content = JSON.parse(pod.content);
}
d["ROOT"] = {
id: "ROOT",
type: "DECK",
ns: "ROOT",
children: [],
};
// construct .children
for (const pod of pods) {
pod.parentId = pod.parentId || "ROOT";
d[pod.parentId].children.push(pod.id);
}
return d;
}
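
// Illustrative result (placeholder ids): for a repo with one deck containing
// one code pod, the returned map looks roughly like:
//   {
//     ROOT:  { id: "ROOT", type: "DECK", ns: "ROOT", children: ["deck1"] },
//     deck1: { ..., parentId: "ROOT", children: ["pod1"] },
//     pod1:  { ..., parentId: "deck1", children: [] },
//   }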

/**
 * Export the pods to a Python file.
*/
async function exportFile(_, { repoId }, { userId }) {
const repo = await prisma.repo.findFirst({
where: {
OR: [
{ id: repoId, public: true },
{ id: repoId, owner: { id: userId || "undefined" } },
{ id: repoId, collaborators: { some: { id: userId || "undefined" } } },
],
},
include: {
pods: {
include: {
children: true,
parent: true,
},
orderBy: {
index: "asc",
},
},
},
});
// now export repo to a file
if (!repo) throw Error("Repo does not exist.");

let d = pods_list2dict(repo.pods);
// let decks = pods.filter((pod) => pod.type === "DECK");
const content = generate_dfs(d["ROOT"], d, 0);

// Name the exported file after the repo.
const filename = `${repo.name || "Untitled"}-${new Date().toISOString()}.py`;
const aws_url = await uploadToS3WithExpiration(filename, content);
return aws_url;
}

export default {
Query: {},
Mutation: {
exportJSON,
exportFile,
},
};
3 changes: 3 additions & 0 deletions api/src/typedefs.ts
@@ -127,5 +127,8 @@ export const typeDefs = gql`
updateVisibility(repoId: String!, isPublic: Boolean!): Boolean
addCollaborator(repoId: String!, email: String!): Boolean
deleteCollaborator(repoId: String!, collaboratorId: String!): Boolean

exportJSON(repoId: String!): String!
exportFile(repoId: String!): String!
}
`;
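
For reference, the new mutations can be exercised directly against the GraphQL endpoint. Below is a minimal TypeScript sketch (not part of the PR); the endpoint URL and repo id are assumptions, and the request omits the API's JWT auth header, so as written it only works for a public repo:

```typescript
// Call the new exportJSON mutation and print the returned S3 URL.
const query = `
  mutation ExportJSON($repoId: String!) {
    exportJSON(repoId: $repoId)
  }
`;

async function main() {
  // Endpoint URL and repo id are placeholders; adjust for your deployment.
  const res = await fetch("http://localhost:4000/graphql", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ query, variables: { repoId: "my-repo-id" } }),
  });
  const { data, errors } = await res.json();
  if (errors) throw new Error(JSON.stringify(errors));
  console.log("Exported file:", data.exportJSON);
}

main().catch(console.error);
```

`exportFile` works the same way and returns the URL of the generated Python file.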