Merge pull request #4710 from ziglang/io-stream-iface

rework I/O stream abstractions

andrewrk authored Mar 11, 2020
2 parents 571f3ed + 06d2f53 commit 895f67c
Showing 56 changed files with 2,740 additions and 2,695 deletions.
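For orientation, a minimal before/after sketch of the stream interface change (a hypothetical snippet written for this summary, not code taken from the commit):

const std = @import("std");

pub fn main() !void {
    // Old interface: the usable stream lived behind a `.stream` field.
    // const stdout = &std.io.getStdOut().outStream().stream;

    // New interface: outStream() returns the writer directly, and the
    // all-or-nothing `write` is renamed to `writeAll`.
    const stdout = std.io.getStdOut().outStream();
    try stdout.writeAll("Hello\n");
    try stdout.print("{} + {} = {}\n", .{ 1, 2, 3 });
}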
95 changes: 44 additions & 51 deletions doc/docgen.zig
@@ -40,20 +40,17 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(out_file_name, .{});
defer out_file.close();

-var file_in_stream = in_file.inStream();
-
-const input_file_bytes = try file_in_stream.stream.readAllAlloc(allocator, max_doc_file_size);
+const input_file_bytes = try in_file.inStream().readAllAlloc(allocator, max_doc_file_size);

-var file_out_stream = out_file.outStream();
-var buffered_out_stream = io.BufferedOutStream(fs.File.WriteError).init(&file_out_stream.stream);
+var buffered_out_stream = io.bufferedOutStream(out_file.outStream());

var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
var toc = try genToc(allocator, &tokenizer);

try fs.cwd().makePath(tmp_dir_name);
defer fs.deleteTree(tmp_dir_name) catch {};

-try genHtml(allocator, &tokenizer, &toc, &buffered_out_stream.stream, zig_exe);
+try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.outStream(), zig_exe);
try buffered_out_stream.flush();
}

@@ -327,8 +324,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var toc_buf = try std.Buffer.initSize(allocator, 0);
defer toc_buf.deinit();

-var toc_buf_adapter = io.BufferOutStream.init(&toc_buf);
-var toc = &toc_buf_adapter.stream;
+var toc = toc_buf.outStream();

var nodes = std.ArrayList(Node).init(allocator);
defer nodes.deinit();
@@ -342,7 +338,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
if (header_stack_size != 0) {
return parseError(tokenizer, token, "unbalanced headers", .{});
}
-try toc.write(" </ul>\n");
+try toc.writeAll(" </ul>\n");
break;
},
Token.Id.Content => {
@@ -407,7 +403,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
if (last_columns) |n| {
try toc.print("<ul style=\"columns: {}\">\n", .{n});
} else {
try toc.write("<ul>\n");
try toc.writeAll("<ul>\n");
}
} else {
last_action = Action.Open;
@@ -424,9 +420,9 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {

if (last_action == Action.Close) {
try toc.writeByteNTimes(' ', 8 + header_stack_size * 4);
try toc.write("</ul></li>\n");
try toc.writeAll("</ul></li>\n");
} else {
try toc.write("</li>\n");
try toc.writeAll("</li>\n");
last_action = Action.Close;
}
} else if (mem.eql(u8, tag_name, "see_also")) {
@@ -614,8 +610,7 @@ fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

-var buf_adapter = io.BufferOutStream.init(&buf);
-var out = &buf_adapter.stream;
+const out = buf.outStream();
for (input) |c| {
switch (c) {
'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
@@ -634,19 +629,18 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

-var buf_adapter = io.BufferOutStream.init(&buf);
-var out = &buf_adapter.stream;
+const out = buf.outStream();
try writeEscaped(out, input);
return buf.toOwnedSlice();
}

fn writeEscaped(out: var, input: []const u8) !void {
for (input) |c| {
try switch (c) {
-'&' => out.write("&amp;"),
-'<' => out.write("&lt;"),
-'>' => out.write("&gt;"),
-'"' => out.write("&quot;"),
+'&' => out.writeAll("&amp;"),
+'<' => out.writeAll("&lt;"),
+'>' => out.writeAll("&gt;"),
+'"' => out.writeAll("&quot;"),
else => out.writeByte(c),
};
}
@@ -681,8 +675,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

-var buf_adapter = io.BufferOutStream.init(&buf);
-var out = &buf_adapter.stream;
+var out = buf.outStream();
var number_start_index: usize = undefined;
var first_number: usize = undefined;
var second_number: usize = undefined;
@@ -743,7 +736,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
'm' => {
state = TermState.Start;
while (open_span_count != 0) : (open_span_count -= 1) {
try out.write("</span>");
try out.writeAll("</span>");
}
if (first_number != 0 or second_number != 0) {
try out.print("<span class=\"t{}_{}\">", .{ first_number, second_number });
@@ -774,7 +767,7 @@ fn isType(name: []const u8) bool {

fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Token, raw_src: []const u8) !void {
const src = mem.trim(u8, raw_src, " \n");
try out.write("<code class=\"zig\">");
try out.writeAll("<code class=\"zig\">");
var tokenizer = std.zig.Tokenizer.init(src);
var index: usize = 0;
var next_tok_is_fn = false;
@@ -835,15 +828,15 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_allowzero,
.Keyword_while,
=> {
try out.write("<span class=\"tok-kw\">");
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.Keyword_fn => {
try out.write("<span class=\"tok-kw\">");
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
next_tok_is_fn = true;
},

@@ -852,41 +845,41 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_true,
.Keyword_false,
=> {
try out.write("<span class=\"tok-null\">");
try out.writeAll("<span class=\"tok-null\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.StringLiteral,
.MultilineStringLiteralLine,
.CharLiteral,
=> {
try out.write("<span class=\"tok-str\">");
try out.writeAll("<span class=\"tok-str\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.Builtin => {
try out.write("<span class=\"tok-builtin\">");
try out.writeAll("<span class=\"tok-builtin\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.LineComment,
.DocComment,
.ContainerDocComment,
.ShebangLine,
=> {
try out.write("<span class=\"tok-comment\">");
try out.writeAll("<span class=\"tok-comment\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.Identifier => {
if (prev_tok_was_fn) {
try out.write("<span class=\"tok-fn\">");
try out.writeAll("<span class=\"tok-fn\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
} else {
const is_int = blk: {
if (src[token.start] != 'i' and src[token.start] != 'u')
@@ -901,9 +894,9 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
break :blk true;
};
if (is_int or isType(src[token.start..token.end])) {
try out.write("<span class=\"tok-type\">");
try out.writeAll("<span class=\"tok-type\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
} else {
try writeEscaped(out, src[token.start..token.end]);
}
@@ -913,9 +906,9 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.IntegerLiteral,
.FloatLiteral,
=> {
try out.write("<span class=\"tok-number\">");
try out.writeAll("<span class=\"tok-number\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},

.Bang,
@@ -983,7 +976,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
}
index = token.end;
}
try out.write("</code>");
try out.writeAll("</code>");
}

fn tokenizeAndPrint(docgen_tokenizer: *Tokenizer, out: var, source_token: Token) !void {
@@ -1002,7 +995,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
for (toc.nodes) |node| {
switch (node) {
.Content => |data| {
-try out.write(data);
+try out.writeAll(data);
},
.Link => |info| {
if (!toc.urls.contains(info.url)) {
@@ -1011,12 +1004,12 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
try out.print("<a href=\"#{}\">{}</a>", .{ info.url, info.name });
},
.Nav => {
-try out.write(toc.toc);
+try out.writeAll(toc.toc);
},
.Builtin => |tok| {
try out.write("<pre>");
try out.writeAll("<pre>");
try tokenizeAndPrintRaw(tokenizer, out, tok, builtin_code);
try out.write("</pre>");
try out.writeAll("</pre>");
},
.HeaderOpen => |info| {
try out.print(
@@ -1025,15 +1018,15 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
);
},
.SeeAlso => |items| {
try out.write("<p>See also:</p><ul>\n");
try out.writeAll("<p>See also:</p><ul>\n");
for (items) |item| {
const url = try urlize(allocator, item.name);
if (!toc.urls.contains(url)) {
return parseError(tokenizer, item.token, "url not found: {}", .{url});
}
try out.print("<li><a href=\"#{}\">{}</a></li>\n", .{ url, item.name });
}
try out.write("</ul>\n");
try out.writeAll("</ul>\n");
},
.Syntax => |content_tok| {
try tokenizeAndPrint(tokenizer, out, content_tok);
@@ -1047,9 +1040,9 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
if (!code.is_inline) {
try out.print("<p class=\"file\">{}.zig</p>", .{code.name});
}
try out.write("<pre>");
try out.writeAll("<pre>");
try tokenizeAndPrint(tokenizer, out, code.source_token);
try out.write("</pre>");
try out.writeAll("</pre>");
const name_plus_ext = try std.fmt.allocPrint(allocator, "{}.zig", .{code.name});
const tmp_source_file_name = try fs.path.join(
allocator,
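The docgen changes above all follow the buffered-writer half of the rework: io.bufferedOutStream infers the underlying stream's type and error set, so the explicit fs.File.WriteError parameter disappears. A minimal usage sketch under that assumption (illustrative only, mirroring the call shape in docgen's main()):

const std = @import("std");

pub fn main() !void {
    // New style: no comptime error-set parameter; the wrapper is built
    // from the value returned by outStream().
    var buffered = std.io.bufferedOutStream(std.io.getStdOut().outStream());
    try buffered.outStream().writeAll("buffered bytes\n");
    try buffered.flush(); // still required, exactly as docgen flushes before exit
}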
2 changes: 1 addition & 1 deletion doc/langref.html.in
@@ -230,7 +230,7 @@
const std = @import("std");

pub fn main() !void {
-const stdout = &std.io.getStdOut().outStream().stream;
+const stdout = std.io.getStdOut().outStream();
try stdout.print("Hello, {}!\n", .{"world"});
}
{#code_end#}
33 changes: 14 additions & 19 deletions lib/std/atomic/queue.zig
@@ -104,21 +104,17 @@ pub fn Queue(comptime T: type) type {
}

pub fn dump(self: *Self) void {
-var stderr_file = std.io.getStdErr() catch return;
-const stderr = &stderr_file.outStream().stream;
-const Error = @typeInfo(@TypeOf(stderr)).Pointer.child.Error;
-
-self.dumpToStream(Error, stderr) catch return;
+self.dumpToStream(std.io.getStdErr().outStream()) catch return;
}

-pub fn dumpToStream(self: *Self, comptime Error: type, stream: *std.io.OutStream(Error)) Error!void {
+pub fn dumpToStream(self: *Self, stream: var) !void {
const S = struct {
fn dumpRecursive(
-s: *std.io.OutStream(Error),
+s: var,
optional_node: ?*Node,
indent: usize,
comptime depth: comptime_int,
-) Error!void {
+) !void {
try s.writeByteNTimes(' ', indent);
if (optional_node) |node| {
try s.print("0x{x}={}\n", .{ @ptrToInt(node), node.data });
@@ -326,17 +322,16 @@ test "std.atomic.Queue single-threaded" {

test "std.atomic.Queue dump" {
const mem = std.mem;
-const SliceOutStream = std.io.SliceOutStream;
var buffer: [1024]u8 = undefined;
var expected_buffer: [1024]u8 = undefined;
-var sos = SliceOutStream.init(buffer[0..]);
+var fbs = std.io.fixedBufferStream(&buffer);

var queue = Queue(i32).init();

// Test empty stream
-sos.reset();
-try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
-expect(mem.eql(u8, buffer[0..sos.pos],
+fbs.reset();
+try queue.dumpToStream(fbs.outStream());
+expect(mem.eql(u8, buffer[0..fbs.pos],
\\head: (null)
\\tail: (null)
\\
@@ -350,8 +345,8 @@ test "std.atomic.Queue dump" {
};
queue.put(&node_0);

-sos.reset();
-try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
+fbs.reset();
+try queue.dumpToStream(fbs.outStream());

var expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1
Expand All @@ -360,7 +355,7 @@ test "std.atomic.Queue dump" {
\\ (null)
\\
, .{ @ptrToInt(queue.head), @ptrToInt(queue.tail) });
-expect(mem.eql(u8, buffer[0..sos.pos], expected));
+expect(mem.eql(u8, buffer[0..fbs.pos], expected));

// Test a stream with two elements
var node_1 = Queue(i32).Node{
Expand All @@ -370,8 +365,8 @@ test "std.atomic.Queue dump" {
};
queue.put(&node_1);

-sos.reset();
-try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
+fbs.reset();
+try queue.dumpToStream(fbs.outStream());

expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1
@@ -381,5 +376,5 @@ test "std.atomic.Queue dump" {
\\ (null)
\\
, .{ @ptrToInt(queue.head), @ptrToInt(queue.head.?.next), @ptrToInt(queue.tail) });
-expect(mem.eql(u8, buffer[0..sos.pos], expected));
+expect(mem.eql(u8, buffer[0..fbs.pos], expected));
}
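The queue changes show the other half of the rework: dumpToStream now takes a duck-typed `stream: var` with an inferred error set, and SliceOutStream gives way to fixedBufferStream. A minimal sketch of that pattern (the `dump` helper is hypothetical, modeled on the test above):

const std = @import("std");

// Post-rework signature style: duck-typed stream parameter and inferred
// error set, instead of `comptime Error: type` + `*std.io.OutStream(Error)`.
fn dump(stream: var, value: i32) !void {
    try stream.print("value = {}\n", .{value});
}

test "dump into a fixed buffer" {
    var buffer: [64]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buffer);
    try dump(fbs.outStream(), 42);
    // `fbs.pos` marks how many bytes were written, as in the queue test.
    std.testing.expect(std.mem.eql(u8, buffer[0..fbs.pos], "value = 42\n"));
}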