avoiding macro clash (#854)
* avoiding macro clash

* missed this one
lemire authored Jan 29, 2025
1 parent ea07694 commit ff92faa
Showing 5 changed files with 31 additions and 31 deletions.
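
Background on the clash being avoided: ALL_CAPS enumerator names such as OPTIONAL and STRICT can collide with object-like macros defined by platform headers (Windows headers define OPTIONAL, for example), and the preprocessor then rewrites the enumerator before the compiler ever sees it. The sketch below is illustrative only and not part of the commit; the #define stands in for such a platform macro.

#include <cstdint>
#include <cstdio>

// Stand-in for a platform macro (e.g. Windows headers define OPTIONAL).
#define OPTIONAL

// With the pre-commit ALL_CAPS spelling, the enumerator named OPTIONAL would
// be macro-expanded away and the enum would no longer compile:
//
//   enum class url_pattern_part_modifier : uint8_t {
//     NONE,
//     OPTIONAL,  // expands to nothing, leaving "NONE, ,", which is ill-formed
//   };
//
// The post-commit lowercase spelling is untouched by the macro:
enum class url_pattern_part_modifier : uint8_t {
  none,
  optional,
  zero_or_more,
  one_or_more,
};

int main() {
  // Compiles and runs even with OPTIONAL defined as a macro.
  auto m = url_pattern_part_modifier::optional;
  std::printf("modifier = %d\n", static_cast<int>(m));
  return 0;
}

In the diffs below, the same mechanical rename is applied to url_pattern_part_modifier, to token_policy, and to every use site.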
8 changes: 4 additions & 4 deletions include/ada/url_pattern.h
@@ -42,15 +42,15 @@ enum class url_pattern_part_type : uint8_t {
 
 enum class url_pattern_part_modifier : uint8_t {
   // The part does not have a modifier.
-  NONE,
+  none,
   // The part has an optional modifier indicated by the U+003F (?) code point.
-  OPTIONAL,
+  optional,
   // The part has a "zero or more" modifier indicated by the U+002A (*) code
   // point.
-  ZERO_OR_MORE,
+  zero_or_more,
   // The part has a "one or more" modifier indicated by the U+002B (+) code
   // point.
-  ONE_OR_MORE,
+  one_or_more,
 };
 
 // @see https://urlpattern.spec.whatwg.org/#part
20 changes: 10 additions & 10 deletions include/ada/url_pattern_helpers-inl.h
@@ -425,13 +425,13 @@ inline ada_warn_unused std::optional<errors>
 Tokenizer::process_tokenizing_error(size_t next_position,
                                     size_t value_position) {
   // If tokenizer’s policy is "strict", then throw a TypeError.
-  if (policy == token_policy::STRICT) {
+  if (policy == token_policy::strict) {
     ada_log("process_tokenizing_error failed with next_position=",
             next_position, " value_position=", value_position);
     return errors::type_error;
   }
   // Assert: tokenizer’s policy is "lenient".
-  ADA_ASSERT_TRUE(policy == token_policy::LENIENT);
+  ADA_ASSERT_TRUE(policy == token_policy::lenient);
   // Run add a token with default length given tokenizer, "invalid-char", next
   // position, and value position.
   add_token_with_default_length(token_type::INVALID_CHAR, next_position,
@@ -535,7 +535,7 @@ url_pattern_parser<F>::maybe_add_part_from_the_pending_fixed_value() {
   // Append part to parser’s part list.
   parts.emplace_back(url_pattern_part_type::FIXED_TEXT,
                      std::move(*encoded_value),
-                     url_pattern_part_modifier::NONE);
+                     url_pattern_part_modifier::none);
   return std::nullopt;
 }

@@ -544,26 +544,26 @@ std::optional<errors> url_pattern_parser<F>::add_part(
     std::string_view prefix, token* name_token, token* regexp_or_wildcard_token,
     std::string_view suffix, token* modifier_token) {
   // Let modifier be "none".
-  auto modifier = url_pattern_part_modifier::NONE;
+  auto modifier = url_pattern_part_modifier::none;
   // If modifier token is not null:
   if (modifier_token) {
     // If modifier token’s value is "?" then set modifier to "optional".
     if (modifier_token->value == "?") {
-      modifier = url_pattern_part_modifier::OPTIONAL;
+      modifier = url_pattern_part_modifier::optional;
     } else if (modifier_token->value == "*") {
       // Otherwise if modifier token’s value is "*" then set modifier to
       // "zero-or-more".
-      modifier = url_pattern_part_modifier::ZERO_OR_MORE;
+      modifier = url_pattern_part_modifier::zero_or_more;
     } else if (modifier_token->value == "+") {
       // Otherwise if modifier token’s value is "+" then set modifier to
       // "one-or-more".
-      modifier = url_pattern_part_modifier::ONE_OR_MORE;
+      modifier = url_pattern_part_modifier::one_or_more;
     }
   }
   // If name token is null and regexp or wildcard token is null and modifier
   // is "none":
   if (!name_token && !regexp_or_wildcard_token &&
-      modifier == url_pattern_part_modifier::NONE) {
+      modifier == url_pattern_part_modifier::none) {
     // Append prefix to the end of parser’s pending fixed value.
     pending_fixed_value.append(prefix);
     return std::nullopt;
@@ -668,7 +668,7 @@ tl::expected<std::vector<url_pattern_part>, errors> parse_pattern_string(
       encoding_callback, generate_segment_wildcard_regexp(options));
   // Set parser’s token list to the result of running tokenize given input and
   // "strict".
-  auto tokenize_result = tokenize(input, token_policy::STRICT);
+  auto tokenize_result = tokenize(input, token_policy::strict);
   if (!tokenize_result) {
     ada_log("parse_pattern_string tokenize failed");
     return tl::unexpected(tokenize_result.error());
@@ -828,7 +828,7 @@ constructor_string_parser<regex_provider>::parse(std::string_view input) {
   ada_log("constructor_string_parser::parse input=", input);
   // Let parser be a new constructor string parser whose input is input and
   // token list is the result of running tokenize given input and "lenient".
-  auto token_list = tokenize(input, token_policy::LENIENT);
+  auto token_list = tokenize(input, token_policy::lenient);
   if (!token_list) {
     return tl::unexpected(token_list.error());
   }
4 changes: 2 additions & 2 deletions include/ada/url_pattern_helpers.h
@@ -37,8 +37,8 @@ std::string to_string(token_type type);
 
 // @see https://urlpattern.spec.whatwg.org/#tokenize-policy
 enum class token_policy {
-  STRICT,
-  LENIENT,
+  strict,
+  lenient,
 };
 
 // @see https://urlpattern.spec.whatwg.org/#tokens
28 changes: 14 additions & 14 deletions src/url_pattern_helpers.cpp
@@ -22,7 +22,7 @@ generate_regular_expression_and_name_list(
     // If part's type is "fixed-text":
     if (part.type == url_pattern_part_type::FIXED_TEXT) {
       // If part's modifier is "none"
-      if (part.modifier == url_pattern_part_modifier::NONE) {
+      if (part.modifier == url_pattern_part_modifier::none) {
         // Append the result of running escape a regexp string given part's
         // value
         result += escape_regexp_string(part.value);
@@ -68,8 +68,8 @@ generate_regular_expression_and_name_list(
     // string
     if (part.prefix.empty() && part.suffix.empty()) {
       // If part's modifier is "none" or "optional"
-      if (part.modifier == url_pattern_part_modifier::NONE ||
-          part.modifier == url_pattern_part_modifier::OPTIONAL) {
+      if (part.modifier == url_pattern_part_modifier::none ||
+          part.modifier == url_pattern_part_modifier::optional) {
        // (<regexp value>)<modifier>
        result += "(" + regexp_value + ")" +
                  convert_modifier_to_string(part.modifier);
@@ -82,8 +82,8 @@ generate_regular_expression_and_name_list(
     }
 
     // If part's modifier is "none" or "optional"
-    if (part.modifier == url_pattern_part_modifier::NONE ||
-        part.modifier == url_pattern_part_modifier::OPTIONAL) {
+    if (part.modifier == url_pattern_part_modifier::none ||
+        part.modifier == url_pattern_part_modifier::optional) {
      // (?:<prefix>(<regexp value>)<suffix>)<modifier>
      result += "(?:" + escape_regexp_string(part.prefix) + "(" + regexp_value +
                ")" + escape_regexp_string(part.suffix) + ")" +
@@ -92,8 +92,8 @@ generate_regular_expression_and_name_list(
     }
 
     // Assert: part's modifier is "zero-or-more" or "one-or-more"
-    ADA_ASSERT_TRUE(part.modifier == url_pattern_part_modifier::ZERO_OR_MORE ||
-                    part.modifier == url_pattern_part_modifier::ONE_OR_MORE);
+    ADA_ASSERT_TRUE(part.modifier == url_pattern_part_modifier::zero_or_more ||
+                    part.modifier == url_pattern_part_modifier::one_or_more);
 
     // Assert: part's prefix is not the empty string or part's suffix is not the
     // empty string
@@ -131,7 +131,7 @@ generate_regular_expression_and_name_list(
     result.append(")");
 
     // If part's modifier is "zero-or-more" then append "?" to the end of result
-    if (part.modifier == url_pattern_part_modifier::ZERO_OR_MORE) {
+    if (part.modifier == url_pattern_part_modifier::zero_or_more) {
       result += "?";
     }
   }
@@ -162,13 +162,13 @@ std::string convert_modifier_to_string(url_pattern_part_modifier modifier) {
   // TODO: Optimize this.
   switch (modifier) {
     // If modifier is "zero-or-more", then return "*".
-    case url_pattern_part_modifier::ZERO_OR_MORE:
+    case url_pattern_part_modifier::zero_or_more:
       return "*";
     // If modifier is "optional", then return "?".
-    case url_pattern_part_modifier::OPTIONAL:
+    case url_pattern_part_modifier::optional:
       return "?";
     // If modifier is "one-or-more", then return "+".
-    case url_pattern_part_modifier::ONE_OR_MORE:
+    case url_pattern_part_modifier::one_or_more:
       return "+";
     // Return the empty string.
     default:
@@ -855,7 +855,7 @@ std::string generate_pattern_string(
     // If part’s type is "fixed-text" then:
     if (part.type == url_pattern_part_type::FIXED_TEXT) {
       // If part’s modifier is "none" then:
-      if (part.modifier == url_pattern_part_modifier::NONE) {
+      if (part.modifier == url_pattern_part_modifier::none) {
        // Append the result of running escape a pattern string given part’s
        // value to the end of result.
        result.append(escape_pattern_string(part.value));
@@ -895,7 +895,7 @@ std::string generate_pattern_string(
     // - next part’s suffix is the empty string
     if (!needs_grouping && custom_name &&
         part.type == url_pattern_part_type::SEGMENT_WILDCARD &&
-        part.modifier == url_pattern_part_modifier::NONE &&
+        part.modifier == url_pattern_part_modifier::none &&
         next_part.has_value() && next_part->prefix.empty() &&
         next_part->suffix.empty()) {
       // If next part’s type is "fixed-text":
@@ -978,7 +978,7 @@ std::string generate_pattern_string(
      if (!custom_name &&
          (!previous_part.has_value() ||
           previous_part->type == url_pattern_part_type::FIXED_TEXT ||
-          previous_part->modifier != url_pattern_part_modifier::NONE ||
+          previous_part->modifier != url_pattern_part_modifier::none ||
           needs_grouping || !part.prefix.empty())) {
        result.append("*");
      } else {
2 changes: 1 addition & 1 deletion tests/wpt_urlpattern_tests.cpp
@@ -43,7 +43,7 @@ TEST(wpt_urlpattern_tests, test_regex_difference) {
 
 TEST(wpt_urlpattern_tests, parser_tokenize_basic_tests) {
   auto tokenize_result =
-      tokenize("*", ada::url_pattern_helpers::token_policy::STRICT);
+      tokenize("*", ada::url_pattern_helpers::token_policy::strict);
   ASSERT_TRUE(tokenize_result);
 }

