author     Robin H. Johnson <robbat2@gentoo.org>    2015-08-08 13:49:04 -0700
committer  Robin H. Johnson <robbat2@gentoo.org>    2015-08-08 17:38:18 -0700
commit     56bd759df1d0c750a065b8c845e93d5dfa6b549d (patch)
tree       3f91093cdb475e565ae857f1c5a7fd339e2d781e /net-libs/hubbub
proj/gentoo: Initial commit
This commit represents a new era for Gentoo:
Storing the gentoo-x86 tree in Git, as converted from CVS.
This commit is the start of the NEW history.
Any historical data is intended to be grafted onto this point.
Creation process:
1. Take final CVS checkout snapshot
2. Remove ALL ChangeLog* files
3. Transform all Manifests to thin
4. Remove empty Manifests
5. Convert all stale $Header$/$Id$ CVS keywords to non-expanded Git $Id$
5.1. Do not touch files with -kb/-ko keyword flags.
Signed-off-by: Robin H. Johnson <robbat2@gentoo.org>
X-Thanks: Alec Warner <antarus@gentoo.org> - did the GSoC 2006 migration tests
X-Thanks: Robin H. Johnson <robbat2@gentoo.org> - infra guy, herding this project
X-Thanks: Nguyen Thai Ngoc Duy <pclouds@gentoo.org> - Former Gentoo developer, wrote Git features for the migration
X-Thanks: Brian Harring <ferringb@gentoo.org> - wrote much python to improve cvs2svn
X-Thanks: Rich Freeman <rich0@gentoo.org> - validation scripts
X-Thanks: Patrick Lauer <patrick@gentoo.org> - Gentoo dev, running new 2014 work in migration
X-Thanks: Michał Górny <mgorny@gentoo.org> - scripts, QA, nagging
X-Thanks: All of other Gentoo developers - many ideas and lots of paint on the bikeshed
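For readers who want a concrete picture of the creation process above, here is a minimal shell sketch of steps 2 and 5. It is illustrative only: the actual migration was driven by the cvs2svn-based tooling credited above, the sed patterns are assumptions, and step 5.1's -kb/-ko handling is omitted.

    # Step 2: drop every ChangeLog* file from the final CVS checkout.
    find . -type f -name 'ChangeLog*' -delete

    # Step 5: collapse expanded CVS $Header$/$Id$ keywords to a bare, non-expanded $Id$
    # (shown here only for ebuilds; the real run covered more files and skipped -kb/-ko ones).
    find . -type f -name '*.ebuild' -print0 |
        xargs -0 sed -i -e 's/\$Header:[^$]*\$/\$Id\$/' -e 's/\$Id:[^$]*\$/\$Id\$/'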
Diffstat (limited to 'net-libs/hubbub')
-rw-r--r--  net-libs/hubbub/Manifest                             |   1
-rw-r--r--  net-libs/hubbub/files/hubbub-0.1.2-error.patch       | 317
-rw-r--r--  net-libs/hubbub/files/hubbub-0.1.2-glibc-2.20.patch  |  17
-rw-r--r--  net-libs/hubbub/hubbub-0.1.2-r1.ebuild               |  36
-rw-r--r--  net-libs/hubbub/hubbub-0.1.2.ebuild                  |  79
-rw-r--r--  net-libs/hubbub/metadata.xml                         |   9
6 files changed, 459 insertions, 0 deletions
diff --git a/net-libs/hubbub/Manifest b/net-libs/hubbub/Manifest
new file mode 100644
index 00000000000..31e72ebbc5f
--- /dev/null
+++ b/net-libs/hubbub/Manifest
@@ -0,0 +1 @@
+DIST hubbub-0.1.2-src.tar.gz 812684 SHA256 95a1d5a71055b28a8e4ce4dc8516b8f0ed691c5ee03525bf73600495657f1b52 SHA512 c6fee003376a75fcca18ee630308320ae155df0149ceee48ad16bb69420c6a8a053bcfee17d6b315b137408284755028f37782d017873b16304ed041ea7934eb WHIRLPOOL 74c5038eabcf60b641a594300a551c139b4882439b08a70caaf4c22de868f7253dff4abab9eb3e517da86accc73882099ec8f854dcf37d1a7232731ba9d607bc
diff --git a/net-libs/hubbub/files/hubbub-0.1.2-error.patch b/net-libs/hubbub/files/hubbub-0.1.2-error.patch
new file mode 100644
index 00000000000..a8abcafe5b8
--- /dev/null
+++ b/net-libs/hubbub/files/hubbub-0.1.2-error.patch
@@ -0,0 +1,317 @@
+--- hubbub-0.1.2/test/csdetect.c
++++ hubbub-0.1.2/test/csdetect.c
+@@ -108,7 +108,7 @@
+ 	static int testnum;
+ 
+ 	assert(hubbub_charset_extract(data, len,
+-			&mibenum, &source) == HUBBUB_OK);
++			&mibenum, &source) == (parserutils_error)HUBBUB_OK);
+ 
+ 	assert(mibenum != 0);
+ 
+--- hubbub-0.1.2/test/parser.c
++++ hubbub-0.1.2/test/parser.c
+@@ -24,7 +24,7 @@
+ 	hubbub_parser *parser;
+ 	hubbub_parser_optparams params;
+ 	FILE *fp;
+-	size_t len, origlen;
++	size_t len;
+ 	uint8_t *buf = alloca(CHUNK_SIZE);
+ 	const char *charset;
+ 	hubbub_charset_source cssource;
+@@ -46,7 +46,7 @@
+ 	}
+ 
+ 	fseek(fp, 0, SEEK_END);
+-	origlen = len = ftell(fp);
++	len = ftell(fp);
+ 	fseek(fp, 0, SEEK_SET);
+ 
+ 	while (len > 0) {
+--- hubbub-0.1.2/test/tokeniser.c
++++ hubbub-0.1.2/test/tokeniser.c
+@@ -26,7 +26,7 @@
+ 	hubbub_tokeniser *tok;
+ 	hubbub_tokeniser_optparams params;
+ 	FILE *fp;
+-	size_t len, origlen;
++	size_t len;
+ #define CHUNK_SIZE (4096)
+ 	uint8_t buf[CHUNK_SIZE];
+ 
+@@ -44,7 +44,7 @@
+ 	params.token_handler.handler = token_handler;
+ 	params.token_handler.pw = NULL;
+ 	assert(hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_TOKEN_HANDLER,
+-			&params) == HUBBUB_OK);
++			&params) == (hubbub_error)HUBBUB_OK);
+ 
+ 	fp = fopen(argv[1], "rb");
+ 	if (fp == NULL) {
+@@ -53,7 +53,7 @@
+ 	}
+ 
+ 	fseek(fp, 0, SEEK_END);
+-	origlen = len = ftell(fp);
++	len = ftell(fp);
+ 	fseek(fp, 0, SEEK_SET);
+ 
+ 	while (len > 0) {
+@@ -63,7 +63,7 @@
+ 			break;
+ 
+ 		assert(parserutils_inputstream_append(stream,
+-				buf, bytes_read) == HUBBUB_OK);
++				buf, bytes_read) == (parserutils_error)HUBBUB_OK);
+ 
+ 
+ 		len -= bytes_read;
+ 
+--- hubbub-0.1.2/test/tokeniser2.c
++++ hubbub-0.1.2/test/tokeniser2.c
+@@ -83,11 +83,9 @@
+ 			printf("Test: %s\n",
+ 					json_object_get_string(val));
+ 		} else if (strcmp(key, "input") == 0) {
+-			int len;
+ 			ctx.input = (const uint8_t *)
+-					json_object_get_string_len(val,
+-					&len);
+-			ctx.input_len = len;
++					json_object_get_string(val);
++			ctx.input_len = json_object_get_string_len(val);
+ 		} else if (strcmp(key, "output") == 0) {
+ 			ctx.output = json_object_get_array(val);
+ 			ctx.output_index = 0;
+@@ -151,7 +149,7 @@
+ 				ctx->last_start_tag);
+ 
+ 		assert(parserutils_inputstream_append(stream,
+-				buf, len - 1) == HUBBUB_OK);
++				buf, len - 1) == (parserutils_error)HUBBUB_OK);
+ 
+ 		assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);
+ 	}
+@@ -173,7 +171,7 @@
+ 			params.content_model.model =
+ 					HUBBUB_CONTENT_MODEL_PCDATA;
+ 		} else {
+-			char *cm = json_object_get_string(
++			const char *cm = json_object_get_string(
+ 					(struct json_object *)
+ 					array_list_get_idx(ctx->content_model, i));
+@@ -196,10 +194,10 @@
+ 			&params) == HUBBUB_OK);
+ 
+ 	assert(parserutils_inputstream_append(stream,
+-			ctx->input, ctx->input_len) == HUBBUB_OK);
++			ctx->input, ctx->input_len) == (parserutils_error)HUBBUB_OK);
+ 
+ 	assert(parserutils_inputstream_append(stream, NULL, 0) ==
+-			HUBBUB_OK);
++			(parserutils_error)HUBBUB_OK);
+ 
+ 	printf("Input: '%.*s' (%d)\n", (int) ctx->input_len,
+ 			(const char *) ctx->input,
+@@ -271,11 +269,11 @@
+ 	switch (token->type) {
+ 	case HUBBUB_TOKEN_DOCTYPE:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+-		char *exppub = json_object_get_string(
++		const char *exppub = json_object_get_string(
+ 				array_list_get_idx(items, 2));
+-		char *expsys = json_object_get_string(
++		const char *expsys = json_object_get_string(
+ 				array_list_get_idx(items, 3));
+ 		bool expquirks = !json_object_get_boolean(
+ 				array_list_get_idx(items, 4));
+@@ -332,7 +330,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_START_TAG:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		struct lh_entry *expattrs = json_object_get_object(
+ 				array_list_get_idx(items, 2))->head;
+@@ -366,7 +364,7 @@
+ 
+ 		for (i = 0; i < token->data.tag.n_attributes; i++) {
+ 			char *expname = (char *) expattrs->k;
+-			char *expval = json_object_get_string(
++			const char *expval = json_object_get_string(
+ 					(struct json_object *) expattrs->v);
+ 			const char *gotname = (const char *)
+ 					token->data.tag.attributes[i].name.ptr;
+@@ -395,7 +393,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_END_TAG:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		const char *tagname = (const char *)
+ 				token->data.tag.name.ptr;
+@@ -412,7 +410,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_COMMENT:
+ 	{
+-		char *expstr = json_object_get_string(
++		const char *expstr = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		const char *gotstr = (const char *)
+ 				token->data.comment.ptr;
+@@ -427,9 +425,10 @@
+ 		break;
+ 	case HUBBUB_TOKEN_CHARACTER:
+ 	{
+-		int expstrlen;
+-		char *expstr = json_object_get_string_len(
+-				array_list_get_idx(items, 1), &expstrlen);
++		int expstrlen = json_object_get_string_len(
++				array_list_get_idx(items, 1));
++		const char *expstr = json_object_get_string(
++				array_list_get_idx(items, 1));
+ 		const char *gotstr = (const char *)
+ 				token->data.character.ptr;
+ 		size_t len = min(token->data.character.len,
+--- hubbub-0.1.2/test/tokeniser3.c
++++ hubbub-0.1.2/test/tokeniser3.c
+@@ -81,11 +81,9 @@
+ 			printf("Test: %s\n",
+ 					json_object_get_string(val));
+ 		} else if (strcmp(key, "input") == 0) {
+-			int len;
+ 			ctx.input = (const uint8_t *)
+-					json_object_get_string_len(val,
+-					&len);
+-			ctx.input_len = len;
++					json_object_get_string(val);
++			ctx.input_len = json_object_get_string_len(val);
+ 		} else if (strcmp(key, "output") == 0) {
+ 			ctx.output = json_object_get_array(val);
+ 			ctx.output_index = 0;
+@@ -148,7 +146,7 @@
+ 				ctx->last_start_tag);
+ 
+ 		assert(parserutils_inputstream_append(stream,
+-				buf, len - 1) == HUBBUB_OK);
++				buf, len - 1) == (parserutils_error)HUBBUB_OK);
+ 
+ 		assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);
+ 	}
+@@ -170,7 +168,7 @@
+ 			params.content_model.model =
+ 					HUBBUB_CONTENT_MODEL_PCDATA;
+ 		} else {
+-			char *cm = json_object_get_string(
++			const char *cm = json_object_get_string(
+ 					(struct json_object *)
+ 					array_list_get_idx(ctx->content_model, i));
+@@ -197,13 +197,13 @@
+ 	for (j = 0; j < ctx->input_len; j++) {
+ 		assert(parserutils_inputstream_append(stream,
+ 				ctx->input + j, 1) ==
+-				HUBBUB_OK);
++				(parserutils_error)HUBBUB_OK);
+ 
+ 		assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);
+ 	}
+ 
+ 	assert(parserutils_inputstream_append(stream, NULL, 0) ==
+-			HUBBUB_OK);
++			(parserutils_error)HUBBUB_OK);
+ 
+ 	assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);
+ 
+@@ -273,11 +271,11 @@
+ 	switch (token->type) {
+ 	case HUBBUB_TOKEN_DOCTYPE:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+-		char *exppub = json_object_get_string(
++		const char *exppub = json_object_get_string(
+ 				array_list_get_idx(items, 2));
+-		char *expsys = json_object_get_string(
++		const char *expsys = json_object_get_string(
+ 				array_list_get_idx(items, 3));
+ 		bool expquirks = !json_object_get_boolean(
+ 				array_list_get_idx(items, 4));
+@@ -337,7 +335,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_START_TAG:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		struct lh_entry *expattrs = json_object_get_object(
+ 				array_list_get_idx(items, 2))->head;
+@@ -371,7 +369,7 @@
+ 
+ 		for (i = 0; i < token->data.tag.n_attributes; i++) {
+ 			char *expname = (char *) expattrs->k;
+-			char *expval = json_object_get_string(
++			const char *expval = json_object_get_string(
+ 					(struct json_object *) expattrs->v);
+ 			const char *gotname = (const char *)
+ 					token->data.tag.attributes[i].name.ptr;
+@@ -400,7 +398,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_END_TAG:
+ 	{
+-		char *expname = json_object_get_string(
++		const char *expname = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		const char *tagname = (const char *)
+ 				token->data.tag.name.ptr;
+@@ -417,7 +415,7 @@
+ 		break;
+ 	case HUBBUB_TOKEN_COMMENT:
+ 	{
+-		char *expstr = json_object_get_string(
++		const char *expstr = json_object_get_string(
+ 				array_list_get_idx(items, 1));
+ 		const char *gotstr = (const char *)
+ 				token->data.comment.ptr;
+@@ -432,9 +430,10 @@
+ 		break;
+ 	case HUBBUB_TOKEN_CHARACTER:
+ 	{
+-		int expstrlen;
+-		char *expstr = json_object_get_string_len(
+-				array_list_get_idx(items, 1), &expstrlen);
++		int expstrlen = json_object_get_string_len(
++				array_list_get_idx(items, 1));
++		const char *expstr = json_object_get_string(
++				array_list_get_idx(items, 1));
+ 		const char *gotstr = (const char *)
+ 				token->data.character.ptr;
+ 		size_t len = min(token->data.character.len,
+--- hubbub-0.1.2/test/tree.c
++++ hubbub-0.1.2/test/tree.c
+@@ -88,7 +88,7 @@
+ 	hubbub_parser *parser;
+ 	hubbub_parser_optparams params;
+ 	FILE *fp;
+-	size_t len, origlen;
++	size_t len;
+ 	uint8_t *buf = alloca(CHUNK_SIZE);
+ 	const char *charset;
+ 	hubbub_charset_source cssource;
+@@ -123,7 +123,7 @@
+ 	}
+ 
+ 	fseek(fp, 0, SEEK_END);
+-	origlen = len = ftell(fp);
++	len = ftell(fp);
+ 	fseek(fp, 0, SEEK_SET);
+ 
+ 	while (len > 0) {
diff --git a/net-libs/hubbub/files/hubbub-0.1.2-glibc-2.20.patch b/net-libs/hubbub/files/hubbub-0.1.2-glibc-2.20.patch
new file mode 100644
index 00000000000..55c38ffddff
--- /dev/null
+++ b/net-libs/hubbub/files/hubbub-0.1.2-glibc-2.20.patch
@@ -0,0 +1,17 @@
+--- work/hubbub-0.1.2/Makefile
++++ work/hubbub-0.1.2/Makefile
+@@ -13,13 +13,7 @@
+ WARNFLAGS := -Wall -W -Wundef -Wpointer-arith -Wcast-align \
+ 	-Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes \
+ 	-Wmissing-declarations -Wnested-externs -pedantic
+-# BeOS/Haiku/AmigaOS have standard library errors that issue warnings.
+-ifneq ($(TARGET),beos)
+-  ifneq ($(TARGET),amiga)
+-    WARNFLAGS := $(WARNFLAGS) -Werror
+-  endif
+-endif
+-CFLAGS := -D_BSD_SOURCE -I$(CURDIR)/include/ \
++CFLAGS := -D_DEFAULT_SOURCE -I$(CURDIR)/include/ \
+ 	-I$(CURDIR)/src $(WARNFLAGS) $(CFLAGS)
+ ifneq ($(GCCVER),2)
+   CFLAGS := $(CFLAGS) -std=c99
diff --git a/net-libs/hubbub/hubbub-0.1.2-r1.ebuild b/net-libs/hubbub/hubbub-0.1.2-r1.ebuild
new file mode 100644
index 00000000000..12d2d41e78f
--- /dev/null
+++ b/net-libs/hubbub/hubbub-0.1.2-r1.ebuild
@@ -0,0 +1,36 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+EAPI=5
+
+inherit netsurf
+
+DESCRIPTION="HTML5 compliant parsing library, written in C"
+HOMEPAGE="http://www.netsurf-browser.org/projects/hubbub/"
+SRC_URI="http://download.netsurf-browser.org/libs/releases/${P}-src.tar.gz"
+
+LICENSE="MIT"
+SLOT="0"
+KEYWORDS="~amd64 ~arm"
+IUSE="debug doc static-libs test"
+
+RDEPEND="<dev-libs/libparserutils-0.2
+	!net-libs/libhubbub"
+DEPEND="${RDEPEND}
+	virtual/pkgconfig
+	virtual/libiconv
+	doc? ( app-doc/doxygen )
+	test? ( dev-lang/perl
+		dev-libs/json-c )"
+
+RESTRICT=test
+
+PATCHES=( "${FILESDIR}"/${P}-glibc-2.20.patch )
+
+src_install() {
+	netsurf_src_install
+
+	dodoc README docs/{Architecture,Macros,Todo,Treebuilder,Updated}
+	use doc && dohtml build/docs/html/*
+}
diff --git a/net-libs/hubbub/hubbub-0.1.2.ebuild b/net-libs/hubbub/hubbub-0.1.2.ebuild
new file mode 100644
index 00000000000..7448df01100
--- /dev/null
+++ b/net-libs/hubbub/hubbub-0.1.2.ebuild
@@ -0,0 +1,79 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+EAPI=5
+
+inherit eutils multilib toolchain-funcs
+
+DESCRIPTION="HTML5 compliant parsing library, written in C"
+HOMEPAGE="http://www.netsurf-browser.org/projects/hubbub/"
+SRC_URI="http://download.netsurf-browser.org/libs/releases/${P}-src.tar.gz"
+
+LICENSE="MIT"
+SLOT="0"
+KEYWORDS="~amd64 ~arm"
+IUSE="debug doc static-libs test"
+
+RDEPEND="<dev-libs/libparserutils-0.2
+	!net-libs/libhubbub"
+DEPEND="${RDEPEND}
+	virtual/pkgconfig
+	virtual/libiconv
+	doc? ( app-doc/doxygen )
+	test? ( dev-lang/perl
+		dev-libs/json-c )"
+
+RESTRICT=test
+
+pkg_setup(){
+	netsurf_src_prepare() {
+		sed -e "/^CCOPT :=/s:=.*:=:" \
+			-e "/^CCNOOPT :=/s:=.*:=:" \
+			-e "/^CCDBG :=/s:=.*:=:" \
+			-i build/makefiles/Makefile.{gcc,clang} || die
+		sed -e "/^INSTALL_ITEMS/s: /lib: /$(get_libdir):g" \
+			-i Makefile || die
+		sed -e "/^libdir/s:/lib:/$(get_libdir):g" \
+			-i ${NETSURF_PKGCONFIG:-${PN}}.pc.in || die
+	}
+	netsurf_src_configure() {
+		echo "Q := " >> Makefile.config
+		echo "CC := $(tc-getCC)" >> Makefile.config
+		echo "AR := $(tc-getAR)" >> Makefile.config
+	}
+
+	netsurf_make() {
+		emake COMPONENT_TYPE=lib-shared BUILD=$(usex debug debug release) "$@"
+		use static-libs && \
+			emake COMPONENT_TYPE=lib-static BUILD=$(usex debug debug release) "$@"
+	}
+}
+
+src_prepare() {
+	NETSURF_PKGCONFIG=lib${PN}
+	netsurf_src_prepare
+
+	epatch "${FILESDIR}"/${P}-error.patch
+}
+
+src_configure() {
+	netsurf_src_configure
+}
+
+src_compile() {
+	netsurf_make
+
+	use doc && emake docs
+}
+
+src_test() {
+	netsurf_make test
+}
+
+src_install() {
+	netsurf_make DESTDIR="${D}" PREFIX=/usr install
+
+	dodoc README docs/{Architecture,Macros,Todo,Treebuilder,Updated}
+	use doc && dohtml build/docs/html/*
+}
diff --git a/net-libs/hubbub/metadata.xml b/net-libs/hubbub/metadata.xml
new file mode 100644
index 00000000000..59acc2cc580
--- /dev/null
+++ b/net-libs/hubbub/metadata.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+<maintainer>
+<email>xmw@gentoo.org</email>
+<name>Michael Weber</name>
+</maintainer>
+</pkgmetadata>
+
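As a usage note, these are standard Portage commands rather than part of the commit: once the converted tree is synced, the package installs in the usual way, and a single ebuild can also be exercised directly from a checkout that is configured as an ebuild repository.

    # install the library from the converted tree
    emerge --ask net-libs/hubbub

    # or run the phases of one ebuild by hand
    cd net-libs/hubbub
    ebuild hubbub-0.1.2-r1.ebuild clean install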