Skip to content

Commit

Permalink
handler: migrate more internal things to nektro/ox repo
Browse files Browse the repository at this point in the history
  • Loading branch information
nektro committed Apr 18, 2022
1 parent 36d1f7d commit d7b8714
Show file tree
Hide file tree
Showing 14 changed files with 84 additions and 192 deletions.
38 changes: 3 additions & 35 deletions src/handler/_handler.zig
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ const ox = @import("ox").www;

const mime = @import("../mime.zig");
const db = @import("../db/_db.zig");
const cookies = @import("cookies");

const _internal = @import("./_internal.zig");
const _index = @import("./index.zig");
Expand Down Expand Up @@ -41,7 +40,7 @@ pub fn getHandler(comptime oa2: type) http.RequestHandler(void) {
Route2(.get, "/about", StaticPek("/about.pek", "About")),
Route3(.get, "/login", oa2.login),
Route3(.get, "/callback", oa2.callback),
Route3(.get, "/logout", logout),
Route3(.get, "/logout", ox.logout),
Route2(.get, "/dashboard", _dashboard),
Route2(.get, "/import", _import),
Route2(.get, "/do_import", _do_import),
Expand All @@ -55,7 +54,7 @@ pub fn getHandler(comptime oa2: type) http.RequestHandler(void) {
}

fn Route1(comptime method: http.Request.Method, comptime endpoint: string, comptime C: ?type, comptime f: anytype) http.router.Route(void) {
return @field(http.router.Builder(void), @tagName(method))(endpoint, C, Middleware(f).next);
return @field(http.router.Builder(void), @tagName(method))(endpoint, C, ox.Route(f));
}

fn Route2(comptime method: http.Request.Method, comptime endpoint: string, comptime T: type) http.router.Route(void) {
Expand All @@ -66,17 +65,6 @@ fn Route3(comptime method: http.Request.Method, comptime endpoint: string, compt
return Route1(method, endpoint, null, f);
}

/// Wraps a route handler `f` so that the sentinel error.HttpNoOp is
/// swallowed (treated as "response already written, stop processing")
/// while every other error is propagated to the server.
fn Middleware(comptime f: anytype) type {
    return struct {
        pub fn next(_: void, response: *http.Response, request: http.Request, captures: ?*const anyopaque) !void {
            f({}, response, request, captures) catch |err| {
                // coerce to anyerror so the comparison works regardless of
                // f's inferred error set
                if (@as(anyerror, err) == error.HttpNoOp) return;
                return err;
            };
        }
    };
}

fn file_route(comptime path: string) http.router.Route(void) {
const T = struct {
fn f(_: void, response: *http.Response, request: http.Request, captures: ?*const anyopaque) !void {
Expand Down Expand Up @@ -106,7 +94,7 @@ fn StaticPek(comptime path: string, comptime title: string) type {
pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?*const anyopaque) !void {
_ = captures;

try _internal.writePageResponse(request.arena, response, request, path, .{
try ox.writePageResponse(request.arena, response, request, path, .{
.aquila_version = @import("root").version,
.page = "static",
.title = title,
Expand All @@ -116,17 +104,6 @@ fn StaticPek(comptime path: string, comptime title: string) type {
};
}

/// Reports whether the request carries a valid login token.
/// Returns false for a missing token or an invalid signature; any other
/// verification error is propagated to the caller.
pub fn isLoggedIn(request: http.Request) !bool {
    const x = ox.token.veryifyRequest(request) catch |err| switch (err) {
        error.NoTokenFound, error.InvalidSignature => return false,
        else => return err,
    };
    // don't need to waste hops to the db to check if it's a valid user ID because
    // if the signature is valid we know it came from us
    _ = x;
    return true;
}

pub fn saveInfo(response: *http.Response, request: http.Request, idp: oauth2.Provider, id: string, name: string, val: json.Value, val2: json.Value) !void {
_ = name;
_ = val2;
Expand All @@ -149,15 +126,6 @@ pub fn getAccessToken(ulid: string) ?string {
return _internal.access_tokens.get(ulid);
}

/// Logs the current user out: deletes the "jwt" session cookie and
/// redirects back to the site root.
/// Route handler shape required by the router; `captures` must be null
/// because this route declares no path captures.
pub fn logout(_: void, response: *http.Response, request: http.Request, captures: ?*const anyopaque) !void {
    std.debug.assert(captures == null);
    // only the response is written to; the request carries no needed data
    _ = request;

    try cookies.delete(response, "jwt");
    try _internal.redirectTo(response, "./");
}

pub fn openFile(dir: std.fs.Dir, path: string) !?std.fs.File {
return dir.openFile(path, .{}) catch |err| switch (err) {
error.FileNotFound => return null,
Expand Down
108 changes: 7 additions & 101 deletions src/handler/_internal.zig
Original file line number Diff line number Diff line change
@@ -1,56 +1,22 @@
const std = @import("std");
const string = []const u8;
const http = @import("apple_pie");
const files = @import("self/files");
const pek = @import("pek");
const extras = @import("extras");
const ulid = @import("ulid");
const root = @import("root");
const options = @import("build_options");
const koino = @import("koino");
const ox = @import("ox").www;

const cookies = @import("cookies");
const db = @import("../db/_db.zig");

// Custom epoch offset for timestamp math.
const epoch: i64 = 1577836800000; // 'Jan 1 2020' -> unix milli

// OAuth access tokens, keyed by user ulid (see getAccessToken).
pub var access_tokens: std.StringHashMap(string) = undefined;
// Token liveness timestamps; presumably keyed by the same ulid — TODO confirm.
pub var token_liveness: std.StringHashMap(i64) = undefined;
// Token expiry timestamps; presumably keyed by the same ulid — TODO confirm.
pub var token_expires: std.StringHashMap(i64) = undefined;
// Time of the most recent token check; semantics defined by the code that
// updates it (not visible here).
pub var last_check: i64 = 0;

/// Renders the pek template `name` (prepended with the shared header
/// template) into `response` as HTML, or serializes `data` as JSON when
/// the client sends `Accept: application/json`.
/// An Etag header is derived from a Wyhash of the template source and
/// every field of `data`, so the tag changes whenever the output would.
pub fn writePageResponse(alloc: std.mem.Allocator, response: *http.Response, request: http.Request, comptime name: string, data: anytype) !void {
    try response.headers.put("Content-Type", "text/html");

    // Hash the template bytes plus each data field into the Etag.
    var h = std.hash.Wyhash.init(0);
    h.update(@field(files, name));
    inline for (std.meta.fields(@TypeOf(data))) |field| {
        hashUp(&h, @field(data, field.name));
    }
    try response.headers.put("Etag", try std.fmt.allocPrint(alloc, "\"{x}\"", .{h.final()}));

    const w = response.writer();

    const headers = try request.headers(alloc);
    // extra check caused by https://github.com/Luukdegram/apple_pie/issues/70
    if (std.mem.eql(u8, headers.get("Accept") orelse headers.get("accept") orelse "", "application/json")) {
        try std.json.stringify(data, .{}, w);
        return;
    }

    const head = files.@"/_header.pek";
    const page = @field(files, name);
    // parse at comptime so template errors surface at build time
    const tmpl = comptime pek.parse(head ++ page);
    try pek.compile(root, alloc, w, tmpl, data);
}

pub fn getUser(response: *http.Response, request: http.Request) !db.User {
const x = ox.token.veryifyRequest(request) catch |err| switch (err) {
error.NoTokenFound, error.InvalidSignature => |e| {
try response.headers.put("X-Jwt-Fail", @errorName(e));
try redirectTo(response, "./login");
try ox.redirectTo(response, "./login");
return error.HttpNoOp;
},
else => return err,
Expand Down Expand Up @@ -101,45 +67,33 @@ pub fn mergeSlices(alloc: std.mem.Allocator, comptime T: type, side_a: []const T
return list.toOwnedSlice();
}

/// Request-level assertion: when `cond` is false, fails the request with
/// `status` and the formatted message (via `fail`, which also returns
/// error.HttpNoOp). A true condition is a no-op.
pub fn assert(cond: bool, response: *http.Response, status: http.Response.Status, comptime fmt: string, args: anytype) !void {
    if (cond) return;
    return fail(response, status, fmt, args);
}

/// Fails the current request: sets the response status, writes the
/// formatted message plus a trailing newline to the body, and returns
/// error.HttpNoOp so callers up the chain stop processing.
/// The explicit error set lets callers switch exhaustively.
pub fn fail(response: *http.Response, status: http.Response.Status, comptime fmt: string, args: anytype) (http.Response.Writer.Error || error{HttpNoOp}) {
    response.status_code = status;
    try response.writer().print(fmt ++ "\n", args);
    return error.HttpNoOp;
}

pub fn reqRemote(request: http.Request, response: *http.Response, id: u64) !db.Remote {
const alloc = request.arena;
const r = try db.Remote.byKey(alloc, .id, id);
return r orelse fail(response, .not_found, "error: remote by id '{d}' not found", .{id});
return r orelse ox.fail(response, .not_found, "error: remote by id '{d}' not found", .{id});
}

pub fn reqUser(request: http.Request, response: *http.Response, r: db.Remote, name: string) !db.User {
const alloc = request.arena;
const u = try r.findUserBy(alloc, .name, name);
return u orelse fail(response, .not_found, "error: user by name '{s}' not found", .{name});
return u orelse ox.fail(response, .not_found, "error: user by name '{s}' not found", .{name});
}

pub fn reqPackage(request: http.Request, response: *http.Response, u: db.User, name: string) !db.Package {
const alloc = request.arena;
const p = try u.findPackageBy(alloc, .name, name);
return p orelse fail(response, .not_found, "error: package by name '{s}' not found", .{name});
return p orelse ox.fail(response, .not_found, "error: package by name '{s}' not found", .{name});
}

pub fn reqVersion(request: http.Request, response: *http.Response, p: db.Package, major: u32, minor: u32) !db.Version {
const alloc = request.arena;
const v = try p.findVersionAt(alloc, major, minor);
return v orelse fail(response, .not_found, "error: version by id 'v{d}.{d}' not found", .{ major, minor });
return v orelse ox.fail(response, .not_found, "error: version by id 'v{d}.{d}' not found", .{ major, minor });
}

pub fn parseInt(comptime T: type, input: ?string, response: *http.Response, comptime fmt: string, args: anytype) !T {
const str = input orelse return fail(response, .bad_request, fmt, args);
return std.fmt.parseUnsigned(T, str, 10) catch fail(response, .bad_request, fmt, args);
const str = input orelse return ox.fail(response, .bad_request, fmt, args);
return std.fmt.parseUnsigned(T, str, 10) catch ox.fail(response, .bad_request, fmt, args);
}

pub fn rename(old_path: string, new_path: string) !void {
Expand All @@ -152,11 +106,6 @@ pub fn rename(old_path: string, new_path: string) !void {
};
}

/// Sends an HTTP 302 Found redirect to `dest` by setting the Location
/// header and writing the response head.
pub fn redirectTo(response: *http.Response, dest: string) !void {
    try response.headers.put("Location", dest);
    try response.writeHeader(.found);
}

pub fn readFileContents(dir: std.fs.Dir, alloc: std.mem.Allocator, path: string) !?string {
const file = dir.openFile(path, .{}) catch |err| switch (err) {
error.FileNotFound => return null,
Expand All @@ -167,49 +116,6 @@ pub fn readFileContents(dir: std.fs.Dir, alloc: std.mem.Allocator, path: string)
return try file.reader().readAllAlloc(alloc, 1024 * 1024 * 2); // 2mb
}

/// Folds `item` into the Wyhash state `h` (used for Etag computation in
/// writePageResponse). Dispatches on the comptime type of `item`:
/// strings hash their bytes, db records hash an identifying field,
/// slices hash each element between "[" / "]" delimiters. Any type not
/// listed is a compile error, so new page data types must be added here.
pub fn hashUp(h: *std.hash.Wyhash, item: anytype) void {
    // any string-like type hashes its raw bytes
    if (comptime std.meta.trait.isZigString(@TypeOf(item))) {
        h.update(item);
        return;
    }
    switch (@TypeOf(item)) {
        // a remote is identified by its domain
        db.Remote => h.update(item.domain),
        // these records all hash via their id field
        db.Remote.Repo, db.User, db.Package, db.Version => hashUp(h, item.id),
        u64 => h.update(&std.mem.toBytes(item)),
        bool => h.update(if (item) "true" else "false"),

        []const db.User,
        []const db.Package,
        []const db.Version,
        []const db.Remote.Repo,
        []const db.CountStat,
        []const db.TimeStat,
        => {
            // delimit the slice contents; closing bracket added on scope exit
            h.update("[");
            defer h.update("]");
            for (item) |inner| {
                hashUp(h, inner);
            }
        },

        // only presence/absence of the optional user is hashed
        ?db.User => {
            h.update(if (item) |_| "1" else "0");
        },

        db.CountStat => {
            h.update(item.ulid);
            hashUp(h, item.count);
        },

        db.TimeStat => {
            h.update(item.ulid);
            h.update(item.time);
        },

        // fail loudly at comptime for any unmapped type
        else => |t| @compileError(@typeName(t)),
    }
}

pub fn renderREADME(alloc: std.mem.Allocator, v: db.Version) !string {
var p = try koino.parser.Parser.init(alloc, .{});
try p.feed(v.readme);
Expand Down
5 changes: 3 additions & 2 deletions src/handler/all.zig
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
const std = @import("std");
const http = @import("apple_pie");
const root = @import("root");
const ox = @import("ox").www;

const db = @import("./../db/_db.zig");

Expand All @@ -11,7 +12,7 @@ pub fn users(_: void, response: *http.Response, request: http.Request, captures:

const alloc = request.arena;

try _internal.writePageResponse(alloc, response, request, "/all_users.pek", .{
try ox.writePageResponse(alloc, response, request, "/all_users.pek", .{
.aquila_version = root.version,
.page = "all_users",
.title = "All Users",
Expand All @@ -25,7 +26,7 @@ pub fn packages(_: void, response: *http.Response, request: http.Request, captur

const alloc = request.arena;

try _internal.writePageResponse(alloc, response, request, "/all_packages.pek", .{
try ox.writePageResponse(alloc, response, request, "/all_packages.pek", .{
.aquila_version = root.version,
.page = "all_packages",
.title = "All Packages",
Expand Down
6 changes: 4 additions & 2 deletions src/handler/dashboard.zig
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
const http = @import("apple_pie");
const ox = @import("ox").www;
const root = @import("root");

const db = @import("./../db/_db.zig");

Expand All @@ -14,8 +16,8 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
const r = try u.remote(alloc);
const p = try u.packages(alloc);

try _internal.writePageResponse(alloc, response, request, "/dashboard.pek", .{
.aquila_version = @import("root").version,
try ox.writePageResponse(alloc, response, request, "/dashboard.pek", .{
.aquila_version = root.version,
.page = "dashboard",
.title = "Dashboard",
.user = @as(?db.User, u),
Expand Down
25 changes: 13 additions & 12 deletions src/handler/do_import.zig
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ const extras = @import("extras");
const root = @import("root");
const zigmod = @import("zigmod");
const git = @import("git");
const ox = @import("ox").www;

const db = @import("./../db/_db.zig");
const cmisc = @import("./../cmisc.zig");
Expand All @@ -15,7 +16,7 @@ pub const Args: ?type = null;

pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?*const anyopaque) !void {
_ = captures;
try _internal.assert(!root.disable_import_repo, response, .forbidden, "error: importing a repository is temporarily disabled.", .{});
try ox.assert(!root.disable_import_repo, response, .forbidden, "error: importing a repository is temporarily disabled.", .{});

const alloc = request.arena;
const u = try _internal.getUser(response, request);
Expand All @@ -24,15 +25,15 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
const repo = q.get("repo") orelse return error.HttpNoOp;

for (try u.packages(alloc)) |item| {
try _internal.assert(!std.mem.eql(u8, item.remote_name, repo), response, .bad_request, "error: repository '{s}' has already been initialized.", .{repo});
try ox.assert(!std.mem.eql(u8, item.remote_name, repo), response, .bad_request, "error: repository '{s}' has already been initialized.", .{repo});
}

const r = try u.remote(alloc);

//

const details = r.getRepo(alloc, repo) catch return _internal.fail(response, .internal_server_error, "error: fetching repo from remote failed", .{});
try _internal.assert(std.mem.eql(u8, details.owner, u.name), response, .forbidden, "error: you do not have the authority to manage this package", .{});
const details = r.getRepo(alloc, repo) catch return ox.fail(response, .internal_server_error, "error: fetching repo from remote failed", .{});
try ox.assert(std.mem.eql(u8, details.owner, u.name), response, .forbidden, "error: you do not have the authority to manage this package", .{});

var path = std.mem.span(cmisc.mkdtemp(try alloc.dupeZ(u8, "/tmp/XXXXXX")));

Expand All @@ -42,13 +43,13 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
.argv = &.{ "git", "clone", "--recursive", details.clone_url, "." },
.max_output_bytes = std.math.maxInt(usize),
});
try _internal.assert(result1.term == .Exited, response, .bad_request, "error: executing git clone failed: {}", .{result1.term});
try _internal.assert(result1.term.Exited == 0, response, .bad_request, "error: executing tar failed with exit code: {d}\n{s}", .{ result1.term.Exited, result1.stderr });
try ox.assert(result1.term == .Exited, response, .bad_request, "error: executing git clone failed: {}", .{result1.term});
try ox.assert(result1.term.Exited == 0, response, .bad_request, "error: executing tar failed with exit code: {d}\n{s}", .{ result1.term.Exited, result1.stderr });

var dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
defer dir.close();

const modfile = zigmod.ModFile.from_dir(alloc, dir) catch |err| return _internal.fail(response, .bad_request, "error: parsing zig.mod failed: {s}", .{@errorName(err)});
const modfile = zigmod.ModFile.from_dir(alloc, dir) catch |err| return ox.fail(response, .bad_request, "error: parsing zig.mod failed: {s}", .{@errorName(err)});
const name = modfile.name;
const license = modfile.yaml.get_string("license");
const mdesc = modfile.yaml.get("description");
Expand All @@ -62,13 +63,13 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
const unpackedsize = try extras.dirSize(alloc, dir);

const cachepath = try std.fs.path.join(alloc, &.{ path, ".zigmod", "deps" });
zigmod.commands.ci.do(alloc, cachepath, dir) catch |err| return _internal.fail(response, .internal_server_error, "error: zigmod ci failed: {s}", .{@errorName(err)});
zigmod.commands.ci.do(alloc, cachepath, dir) catch |err| return ox.fail(response, .internal_server_error, "error: zigmod ci failed: {s}", .{@errorName(err)});
try dir.deleteFile("deps.zig");
const totalsize = try extras.dirSize(alloc, dir);
try dir.deleteTree(".zigmod");

const filelist = try extras.fileList(alloc, dir);
try _internal.assert(filelist.len > 0, response, .internal_server_error, "error: found no files in repo", .{});
try ox.assert(filelist.len > 0, response, .internal_server_error, "error: found no files in repo", .{});

const tarpath = try std.mem.concat(alloc, u8, &.{ path, ".tar.gz" });

Expand All @@ -81,8 +82,8 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
.argv = argv,
.cwd = path,
});
try _internal.assert(result2.term == .Exited, response, .internal_server_error, "error: executing tar failed: {}", .{result2.term});
try _internal.assert(result2.term.Exited == 0, response, .internal_server_error, "error: executing tar failed with exit code: {d}\n{s}", .{ result2.term.Exited, result2.stderr });
try ox.assert(result2.term == .Exited, response, .internal_server_error, "error: executing tar failed: {}", .{result2.term});
try ox.assert(result2.term.Exited == 0, response, .internal_server_error, "error: executing tar failed with exit code: {d}\n{s}", .{ result2.term.Exited, result2.stderr });

const tarfile = try std.fs.cwd().openFile(tarpath, .{});
defer tarfile.close();
Expand Down Expand Up @@ -116,5 +117,5 @@ pub fn get(_: void, response: *http.Response, request: http.Request, captures: ?
try std.mem.concat(alloc, u8, &.{ "https://", root.domain, desturl, "/hook?secret=", p.hook_secret }),
);

try _internal.redirectTo(response, try std.mem.concat(alloc, u8, &.{ ".", desturl }));
try ox.redirectTo(response, try std.mem.concat(alloc, u8, &.{ ".", desturl }));
}
Loading

0 comments on commit d7b8714

Please sign in to comment.