[Extensions::] Extension Services.

Behaviour specific to copies of the extension genre.
@h Scanning metadata.
An extension has a title and an author name, each of which is limited in
length to one character less than the following constants:

@d MAX_EXTENSION_TITLE_LENGTH 51
@d MAX_EXTENSION_AUTHOR_LENGTH 51

=
|
2020-02-08 12:34:58 +02:00
|
|
|
typedef struct inform_extension {
|
|
|
|
struct inbuild_copy *as_copy;
|
|
|
|
struct wording body_text; /* Body of source text supplied in extension, if any */
|
|
|
|
int body_text_unbroken; /* Does this contain text waiting to be sentence-broken? */
|
2023-07-17 02:44:11 +03:00
|
|
|
struct compiled_documentation *documentation; /* or |NULL| if none supplied */
|
2023-04-11 00:54:14 +03:00
|
|
|
int documentation_sought; /* Has it yet been looked for? */
|
2020-02-17 00:01:50 +02:00
|
|
|
int standard; /* the (or perhaps just a) Standard Rules extension */
|
2020-02-08 12:34:58 +02:00
|
|
|
int authorial_modesty; /* Do not credit in the compiled game */
|
2020-02-19 22:48:30 +02:00
|
|
|
struct text_stream *rubric_as_lexed; /* brief description found in opening lines */
|
2020-02-08 12:34:58 +02:00
|
|
|
struct text_stream *extra_credit_as_lexed;
|
2020-02-17 02:16:38 +02:00
|
|
|
struct source_file *read_into_file; /* Which source file loaded this */
|
|
|
|
struct inbuild_requirement *must_satisfy;
|
|
|
|
int loaded_from_built_in_area; /* Located within Inform application */
|
2020-05-05 23:59:02 +03:00
|
|
|
struct inform_project *read_into_project; /* If any */
|
2020-03-05 14:42:33 +02:00
|
|
|
struct parse_node_tree *syntax_tree;
|
2020-02-17 00:01:50 +02:00
|
|
|
struct parse_node *inclusion_sentence; /* Where the source called for this */
|
2021-06-07 00:18:08 +03:00
|
|
|
int auto_included;
|
2020-05-09 18:25:04 +03:00
|
|
|
struct linked_list *search_list; /* of |inbuild_nest| */
|
|
|
|
int word_count; /* or 0 if this hasn't been read (yet) */
|
2022-12-08 01:28:26 +02:00
|
|
|
struct linked_list *activations; /* of |element_activation| */
|
|
|
|
struct linked_list *extensions; /* of |inbuild_requirement| */
|
|
|
|
struct linked_list *kits; /* of |inbuild_requirement| */
|
2023-02-03 01:16:53 +02:00
|
|
|
struct inbuild_nest *materials_nest;
|
2023-07-13 02:23:12 +03:00
|
|
|
int documented_on_this_run;
|
2020-05-09 15:07:39 +03:00
|
|
|
CLASS_DEFINITION
|
2020-02-08 12:34:58 +02:00
|
|
|
} inform_extension;
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
@ This is called as soon as a new copy |C| of the extension genre is created.
We scan the extension file for the title, author, version number and any
compatibility notes given (such as "for Glulx only").

=
|
2020-03-29 16:48:19 +03:00
|
|
|
void Extensions::scan(inbuild_copy *C) {
	/* Create the metadata object and attach it to the copy before scanning,
	so that errors found below can be attached to |C| as we go. */
	inform_extension *E = CREATE(inform_extension);
	E->as_copy = C;
	Copies::set_metadata(C, STORE_POINTER_inform_extension(E));
	@<Initialise the extension docket@>;
	TEMPORARY_TEXT(claimed_author_name)
	TEMPORARY_TEXT(claimed_title)
	TEMPORARY_TEXT(reqs)
	semantic_version_number V = VersionNumbers::null();
	filename *extension_source_filename = NULL;
	@<Scan the file@>;
	@<Change the edition of the copy in light of the metadata found in the scan@>;
	if (Works::is_basic_inform(C->edition->work)) E->standard = TRUE;
	if (Works::is_standard_rules(C->edition->work)) E->standard = TRUE;
	/* Directory-stored extensions additionally carry a JSON metadata file,
	which must agree with what the source text itself claims. */
	if (C->location_if_path) {
		text_stream *force_JSON_write = NULL; /* non-null reason forces a rewrite */
		TEMPORARY_TEXT(JSON_author_name)
		TEMPORARY_TEXT(JSON_title)
		@<Scan the metadata file, if there is one@>;
		@<Check that the JSON metadata agrees@>;
		if (force_JSON_write) @<Write a corrected JSON metadata file@>;
		DISCARD_TEXT(JSON_author_name)
		DISCARD_TEXT(JSON_title)
	}
	DISCARD_TEXT(claimed_author_name)
	DISCARD_TEXT(claimed_title)
	DISCARD_TEXT(reqs)
}
|
|
|
|
|
|
|
|
@<Initialise the extension docket@> =
|
2020-02-08 12:34:58 +02:00
|
|
|
E->body_text = EMPTY_WORDING;
|
|
|
|
E->body_text_unbroken = FALSE;
|
2023-07-17 02:44:11 +03:00
|
|
|
E->documentation = NULL;
|
2023-04-11 00:54:14 +03:00
|
|
|
E->documentation_sought = FALSE;
|
2020-02-17 00:01:50 +02:00
|
|
|
E->standard = FALSE;
|
2020-02-08 12:34:58 +02:00
|
|
|
E->authorial_modesty = FALSE;
|
2020-02-17 02:16:38 +02:00
|
|
|
E->read_into_file = NULL;
|
2020-02-19 22:48:30 +02:00
|
|
|
E->rubric_as_lexed = Str::new();
|
2020-02-08 12:34:58 +02:00
|
|
|
E->extra_credit_as_lexed = NULL;
|
2020-02-17 02:16:38 +02:00
|
|
|
E->must_satisfy = NULL;
|
|
|
|
E->loaded_from_built_in_area = FALSE;
|
2020-05-05 23:59:02 +03:00
|
|
|
E->read_into_project = NULL;
|
2020-05-11 17:21:29 +03:00
|
|
|
E->syntax_tree = SyntaxTree::new();
|
2020-02-17 00:01:50 +02:00
|
|
|
E->inclusion_sentence = NULL;
|
2021-06-07 00:18:08 +03:00
|
|
|
E->auto_included = FALSE;
|
2020-05-05 23:59:02 +03:00
|
|
|
E->search_list = NEW_LINKED_LIST(inbuild_nest);
|
2020-05-09 18:25:04 +03:00
|
|
|
E->word_count = 0;
|
2022-12-08 01:28:26 +02:00
|
|
|
E->activations = NEW_LINKED_LIST(element_activation);
|
|
|
|
E->extensions = NEW_LINKED_LIST(inbuild_requirement);
|
|
|
|
E->kits = NEW_LINKED_LIST(inbuild_requirement);
|
2023-02-03 01:16:53 +02:00
|
|
|
E->materials_nest = NULL;
|
2023-07-13 02:23:12 +03:00
|
|
|
E->documented_on_this_run = FALSE;
|
2022-12-08 01:28:26 +02:00
|
|
|
|
2020-02-19 22:48:30 +02:00
|
|
|
@ The following scans a potential extension file. If it seems malformed, a
suitable error is written to the stream |error_text|. If not, this is left
alone, and the version number is returned.

=
|
|
|
|
@<Scan the file@> =
|
2020-06-28 01:18:54 +03:00
|
|
|
TEMPORARY_TEXT(titling_line)
|
|
|
|
TEMPORARY_TEXT(version_text)
|
2022-12-08 01:28:26 +02:00
|
|
|
filename *F = Extensions::main_source_file(C);
|
2020-02-19 22:48:30 +02:00
|
|
|
FILE *EXTF = Filenames::fopen_caseless(F, "r");
|
|
|
|
if (EXTF == NULL) {
|
2023-03-22 07:45:28 +02:00
|
|
|
filename *A = Extensions::alternative_source_file(C->location_if_path);
|
2023-02-09 00:08:44 +02:00
|
|
|
if (A) {
|
|
|
|
EXTF = Filenames::fopen_caseless(A, "r");
|
|
|
|
if (EXTF) {
|
|
|
|
extension_source_filename = A;
|
|
|
|
@<Look inside the file@>;
|
|
|
|
fclose(EXTF);
|
|
|
|
} else Copies::attach_error(C, CopyErrors::new_F(OPEN_FAILED_CE, -1, F));
|
|
|
|
} else Copies::attach_error(C, CopyErrors::new_F(OPEN_FAILED_CE, -1, F));
|
2020-02-19 22:48:30 +02:00
|
|
|
} else {
|
2023-02-09 00:08:44 +02:00
|
|
|
extension_source_filename = F;
|
|
|
|
@<Look inside the file@>;
|
2020-02-19 22:48:30 +02:00
|
|
|
fclose(EXTF);
|
2023-02-09 00:08:44 +02:00
|
|
|
}
|
|
|
|
if (C->location_if_path) {
|
|
|
|
TEMPORARY_TEXT(correct_leafname)
|
|
|
|
WRITE_TO(correct_leafname, "%S.i7x", claimed_title);
|
|
|
|
if (Str::ne_insensitive(correct_leafname, Filenames::get_leafname(extension_source_filename))) {
|
|
|
|
int allow = FALSE;
|
2023-03-22 07:45:28 +02:00
|
|
|
if ((repair_mode) &&
|
2023-02-09 00:08:44 +02:00
|
|
|
(Extensions::rename_file(extension_source_filename, correct_leafname)))
|
|
|
|
allow = TRUE;
|
|
|
|
if (allow == FALSE) {
|
2020-06-28 01:18:54 +03:00
|
|
|
TEMPORARY_TEXT(error_text)
|
2023-02-09 00:08:44 +02:00
|
|
|
WRITE_TO(error_text,
|
|
|
|
"the source file in the extension is called '%S' but should be '%S' to match the contents",
|
|
|
|
Filenames::get_leafname(extension_source_filename), correct_leafname);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_BAD_FILENAME_CE, -1, error_text));
|
2020-06-28 01:18:54 +03:00
|
|
|
DISCARD_TEXT(error_text)
|
2020-02-19 22:48:30 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-06-28 01:18:54 +03:00
|
|
|
DISCARD_TEXT(titling_line)
|
|
|
|
DISCARD_TEXT(version_text)
|
2020-02-19 22:48:30 +02:00
|
|
|
|
2023-02-09 00:08:44 +02:00
|
|
|
@<Look inside the file@> =
|
|
|
|
@<Read the titling line of the extension and normalise its casing@>;
|
|
|
|
@<Read the rubric text, if any is present@>;
|
|
|
|
@<Parse the version, title, author and VM requirements from the titling line@>;
|
|
|
|
if (Str::len(version_text) > 0) {
|
|
|
|
V = VersionNumbers::from_text(version_text);
|
|
|
|
if (VersionNumbers::is_null(V)) {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "the version number '%S' is malformed", version_text);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
}
|
|
|
|
}
|
2023-03-22 07:45:28 +02:00
|
|
|
if ((Str::len(version_text) == 0) && (C->location_if_path)) {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "an extension stored in a directory must have a version number");
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
V = VersionNumbers::from_text(I"1");
|
|
|
|
}
|
2023-02-09 00:08:44 +02:00
|
|
|
|
2020-02-19 22:48:30 +02:00
|
|
|
@ The titling line is terminated by any of |0A|, |0D|, |0A 0D| or |0D 0A|, or
by the local |\n| for good measure.
|
|
|
|
|
|
|
|
@<Read the titling line of the extension and normalise its casing@> =
|
2023-09-06 15:19:32 +03:00
|
|
|
inchar32_t c;
|
|
|
|
int commented_out = FALSE, quoted = FALSE, content_found = FALSE;
|
|
|
|
while ((c = TextFiles::utf8_fgetc(EXTF, NULL, NULL)) != CH32EOF) {
|
2020-02-19 22:48:30 +02:00
|
|
|
if (c == 0xFEFF) continue; /* skip the optional Unicode BOM pseudo-character */
|
2023-05-24 09:46:02 +03:00
|
|
|
if (commented_out) {
|
|
|
|
if (c == ']') commented_out = FALSE;
|
|
|
|
} else if (quoted) {
|
|
|
|
if (c == '"') quoted = FALSE;
|
2023-09-06 15:19:32 +03:00
|
|
|
PUT_TO(titling_line, c);
|
2023-05-24 09:46:02 +03:00
|
|
|
} else {
|
|
|
|
if (c == '[') commented_out = TRUE;
|
|
|
|
else {
|
|
|
|
if (c == '"') quoted = TRUE;
|
|
|
|
else if ((c == '\x0a') || (c == '\x0d') || (c == '\n')) {
|
|
|
|
if (content_found) break;
|
|
|
|
c = ' ';
|
2023-09-06 15:19:32 +03:00
|
|
|
} else if (Characters::is_whitespace(c) == FALSE) {
|
2023-05-24 09:46:02 +03:00
|
|
|
content_found = TRUE;
|
|
|
|
}
|
2023-09-06 15:19:32 +03:00
|
|
|
PUT_TO(titling_line, c);
|
2023-05-24 09:46:02 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (content_found == FALSE) {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "extension doesn't have an identifying title line at the top");
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
2020-02-19 22:48:30 +02:00
|
|
|
}
|
2022-05-11 01:39:10 +03:00
|
|
|
Str::trim_white_space(titling_line);
|
2022-05-03 13:02:57 +03:00
|
|
|
Works::normalise_casing_mixed(titling_line);
|
2020-02-19 22:48:30 +02:00
|
|
|
|
|
|
|
@ In the following, all possible newlines are converted to white space, and
all white space before a quoted rubric text is ignored. We need to do this
partly because users have probably keyed a double line break before the
rubric, but also because we might have stopped reading the titling line
halfway through a line division combination like |0A 0D|, so that the first
thing we read here is a meaningless |0D|.
|
|
|
|
|
|
|
|
@<Read the rubric text, if any is present@> =
|
2023-09-06 15:19:32 +03:00
|
|
|
inchar32_t c;
|
|
|
|
int found_start = FALSE;
|
|
|
|
while ((c = TextFiles::utf8_fgetc(EXTF, NULL, NULL)) != CH32EOF) {
|
2020-02-19 22:48:30 +02:00
|
|
|
if ((c == '\x0a') || (c == '\x0d') || (c == '\n') || (c == '\t')) c = ' ';
|
|
|
|
if ((c != ' ') && (found_start == FALSE)) {
|
|
|
|
if (c == '"') found_start = TRUE;
|
|
|
|
else break;
|
|
|
|
} else {
|
|
|
|
if (c == '"') break;
|
2023-09-06 15:19:32 +03:00
|
|
|
if (found_start) PUT_TO(E->rubric_as_lexed, c);
|
2020-02-19 22:48:30 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@ In general, once case-normalised, a titling line looks like this:

>> Version 2/070423 Of Going To The Zoo (For Glulx Only) By Cary Grant Begins Here.

and the version information, the VM restriction and the full stop are all
optional, but the division word "of" and the concluding "begin[s] here"
are not. We break it up into pieces; for speed, we won't use the lexer to
load the entire file.
|
|
|
|
|
|
|
|
@<Parse the version, title, author and VM requirements from the titling line@> =
|
|
|
|
match_results mr = Regexp::create_mr();
|
|
|
|
if (Str::get_last_char(titling_line) == '.') Str::delete_last_character(titling_line);
|
2023-09-05 10:36:51 +03:00
|
|
|
if ((Regexp::match(&mr, titling_line, U"(%c*) Begin Here")) ||
|
|
|
|
(Regexp::match(&mr, titling_line, U"(%c*) Begins Here"))) {
|
2020-02-19 22:48:30 +02:00
|
|
|
Str::copy(titling_line, mr.exp[0]);
|
|
|
|
} else {
|
2023-09-05 10:36:51 +03:00
|
|
|
if ((Regexp::match(&mr, titling_line, U"(%c*) Start Here")) ||
|
|
|
|
(Regexp::match(&mr, titling_line, U"(%c*) Starts Here"))) {
|
2020-02-19 22:48:30 +02:00
|
|
|
Str::copy(titling_line, mr.exp[0]);
|
|
|
|
}
|
2020-03-29 19:39:17 +03:00
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1,
|
2020-02-19 22:48:30 +02:00
|
|
|
I"the opening line does not end 'begin(s) here'"));
|
|
|
|
}
|
|
|
|
@<Scan the version text, if any, and advance to the position past Version... Of@>;
|
2023-09-05 10:36:51 +03:00
|
|
|
if (Regexp::match(&mr, titling_line, U"The (%c*)")) Str::copy(titling_line, mr.exp[0]);
|
2020-02-19 22:48:30 +02:00
|
|
|
@<Divide the remaining text into a claimed author name and title, divided by By@>;
|
|
|
|
@<Extract the VM requirements text, if any, from the claimed title@>;
|
|
|
|
Regexp::dispose_of(&mr);
|
|
|
|
|
|
|
|
@ We make no attempt to check the version number for validity: the purpose
of the census is to identify extensions and reject accidentally included
other files, not to syntax-check all extensions to see if they would work
if used.
|
|
|
|
|
|
|
|
@<Scan the version text, if any, and advance to the position past Version... Of@> =
	/* lazy match so that the version text stops at the first " Of " */
	if (Regexp::match(&mr, titling_line, U"Version (%c*?) Of (%c*)")) {
		Str::copy(version_text, mr.exp[0]);
		Str::copy(titling_line, mr.exp[1]);
	}
|
|
|
|
|
|
|
|
@ The earliest "by" is the divider: note that extension titles are not
allowed to contain this word, so "North By Northwest By Cary Grant" is
not a situation we need to contend with.
|
|
|
|
|
|
|
|
@<Divide the remaining text into a claimed author name and title, divided by By@> =
|
2023-05-23 18:50:12 +03:00
|
|
|
int quote_found = FALSE, brackets_underflowed = FALSE, brackets_in_author = FALSE;
|
|
|
|
int which = 1, bl = 0;
|
|
|
|
for (int i=0; i<Str::len(titling_line); i++) {
|
2023-09-05 10:36:51 +03:00
|
|
|
inchar32_t c = Str::get_at(titling_line, i);
|
2023-05-23 18:50:12 +03:00
|
|
|
if (c == '(') { bl++; if (which == 2) brackets_in_author = TRUE; }
|
|
|
|
if (c == ')') { bl--; if (bl < 0) brackets_underflowed = TRUE; }
|
|
|
|
if (c == '\"') quote_found = TRUE;
|
|
|
|
if ((bl == 0) && (Str::includes_at(titling_line, i, I" By "))) {
|
|
|
|
if (which == 1) {
|
|
|
|
i += 3;
|
|
|
|
which = 2;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (which == 1) PUT_TO(claimed_title, c);
|
|
|
|
else PUT_TO(claimed_author_name, c);
|
|
|
|
}
|
|
|
|
if ((bl != 0) || (brackets_underflowed))
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1,
|
|
|
|
I"brackets '(' and ')' are used in an unbalanced way in the titling line"));
|
|
|
|
else if (brackets_in_author)
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1,
|
|
|
|
I"brackets '(' and ')' are used as part of the author name in the titling line"));
|
|
|
|
if (quote_found)
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1,
|
|
|
|
I"the titling line includes a double-quotation mark"));
|
|
|
|
if (which == 1)
|
2020-03-29 19:39:17 +03:00
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1,
|
2020-02-19 22:48:30 +02:00
|
|
|
I"the titling line does not give both author and title"));
|
2023-05-24 09:46:02 +03:00
|
|
|
Str::trim_white_space(claimed_title);
|
|
|
|
Str::trim_white_space(claimed_author_name);
|
2020-02-19 22:48:30 +02:00
|
|
|
|
|
|
|
@ Similarly, extension titles are not allowed to contain parentheses, so
this is unambiguous.
|
|
|
|
|
|
|
|
@<Extract the VM requirements text, if any, from the claimed title@> =
	/* a trailing parenthesised clause, e.g. "(For Glulx Only)", is the VM requirement */
	if (Regexp::match(&mr, claimed_title, U"(%c*?) *(%(%c*%))")) {
		Str::copy(claimed_title, mr.exp[0]);
		Str::copy(reqs, mr.exp[1]);
	}
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
@<Change the edition of the copy in light of the metadata found in the scan@> =
|
|
|
|
if (Str::len(claimed_title) == 0) { WRITE_TO(claimed_title, "Unknown"); }
|
|
|
|
if (Str::len(claimed_author_name) == 0) { WRITE_TO(claimed_author_name, "Anonymous"); }
|
2020-05-05 23:59:02 +03:00
|
|
|
if (Str::len(claimed_title) > MAX_EXTENSION_TITLE_LENGTH)
|
|
|
|
Copies::attach_error(C,
|
|
|
|
CopyErrors::new_N(EXT_TITLE_TOO_LONG_CE, -1, Str::len(claimed_title)));
|
|
|
|
if (Str::len(claimed_author_name) > MAX_EXTENSION_AUTHOR_LENGTH)
|
|
|
|
Copies::attach_error(C,
|
|
|
|
CopyErrors::new_N(EXT_AUTHOR_TOO_LONG_CE, -1, Str::len(claimed_author_name)));
|
|
|
|
C->edition = Editions::new(
|
2023-07-23 17:28:37 +03:00
|
|
|
Works::new_raw(C->edition->work->genre, claimed_title, claimed_author_name), V);
|
2020-03-31 02:17:21 +03:00
|
|
|
if (Str::len(reqs) > 0) {
|
|
|
|
compatibility_specification *CS = Compatibility::from_text(reqs);
|
|
|
|
if (CS) C->edition->compatibility = CS;
|
|
|
|
else {
|
2020-06-28 01:18:54 +03:00
|
|
|
TEMPORARY_TEXT(err)
|
2020-03-31 02:17:21 +03:00
|
|
|
WRITE_TO(err, "cannot read compatibility '%S'", reqs);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(EXT_MISWORDED_CE, -1, err));
|
2020-06-28 01:18:54 +03:00
|
|
|
DISCARD_TEXT(err)
|
2020-03-31 02:17:21 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-12-08 01:28:26 +02:00
|
|
|
@<Scan the metadata file, if there is one@> =
|
2023-03-22 07:45:28 +02:00
|
|
|
filename *F = Filenames::in(C->location_if_path, I"extension_metadata.json");
|
|
|
|
if (TextFiles::exists(F)) {
|
|
|
|
JSONMetadata::read_metadata_file(C, F, JSON_title, JSON_author_name);
|
2022-12-08 01:28:26 +02:00
|
|
|
if (C->metadata_record) {
|
|
|
|
@<Extract activations@>;
|
|
|
|
JSON_value *extension_details =
|
|
|
|
JSON::look_up_object(C->metadata_record, I"extension-details");
|
|
|
|
if (extension_details) @<Extract the extension details@>;
|
|
|
|
JSON_value *needs = JSON::look_up_object(C->metadata_record, I"needs");
|
|
|
|
if (needs) {
|
|
|
|
JSON_value *V;
|
|
|
|
LOOP_OVER_LINKED_LIST(V, JSON_value, needs->if_list)
|
|
|
|
@<Extract this possibly conditional requirement@>;
|
|
|
|
}
|
|
|
|
}
|
2023-03-22 07:45:28 +02:00
|
|
|
} else {
|
2023-06-02 00:24:00 +03:00
|
|
|
SVEXPLAIN(2, "(no JSON metadata file found at %f)\n", F);
|
2023-03-22 07:45:28 +02:00
|
|
|
if (repair_mode) {
|
|
|
|
force_JSON_write = I"the JSON file is currently missing";
|
|
|
|
} else {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "the extension directory contains no 'extension_metadata.json' file");
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
}
|
2022-12-08 01:28:26 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@<Extract activations@> =
|
|
|
|
JSON_value *activates = JSON::look_up_object(C->metadata_record, I"activates");
|
|
|
|
if (activates) {
|
|
|
|
JSON_value *V;
|
|
|
|
LOOP_OVER_LINKED_LIST(V, JSON_value, activates->if_list)
|
|
|
|
Extensions::activation(E, V->if_string, TRUE);
|
|
|
|
}
|
|
|
|
JSON_value *deactivates = JSON::look_up_object(C->metadata_record, I"deactivates");
|
|
|
|
if (deactivates) {
|
|
|
|
JSON_value *V;
|
|
|
|
LOOP_OVER_LINKED_LIST(V, JSON_value, deactivates->if_list)
|
|
|
|
Extensions::activation(E, V->if_string, FALSE);
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Extract the extension details@> =
	; /* nothing is currently extracted from the extension-details object */
|
|
|
|
|
|
|
|
@<Extract this possibly conditional requirement@> =
|
|
|
|
int parity = TRUE;
|
|
|
|
JSON_value *if_clause = JSON::look_up_object(V, I"if");
|
|
|
|
JSON_value *unless_clause = JSON::look_up_object(V, I"unless");
|
|
|
|
if (unless_clause) {
|
|
|
|
if_clause = unless_clause; parity = FALSE;
|
|
|
|
}
|
|
|
|
if (if_clause) {
|
|
|
|
TEMPORARY_TEXT(err)
|
|
|
|
WRITE_TO(err, "extension dependencies must be unconditional");
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
}
|
|
|
|
JSON_value *need_clause = JSON::look_up_object(V, I"need");
|
|
|
|
if (need_clause) {
|
|
|
|
JSON_value *need_type = JSON::look_up_object(need_clause, I"type");
|
|
|
|
JSON_value *need_title = JSON::look_up_object(need_clause, I"title");
|
|
|
|
JSON_value *need_author = JSON::look_up_object(need_clause, I"author");
|
|
|
|
JSON_value *need_version = JSON::look_up_object(need_clause, I"version");
|
|
|
|
if (Str::eq(need_type->if_string, I"extension"))
|
|
|
|
@<Deal with an extension dependency@>
|
|
|
|
else if (Str::eq(need_type->if_string, I"kit"))
|
|
|
|
@<Deal with a kit dependency@>
|
|
|
|
else {
|
|
|
|
TEMPORARY_TEXT(err)
|
2023-03-22 07:45:28 +02:00
|
|
|
WRITE_TO(err, "an extension can only have extensions and kits as dependencies");
|
2022-12-08 01:28:26 +02:00
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Deal with an extension dependency@> =
|
|
|
|
text_stream *extension_title = need_title->if_string;
|
|
|
|
text_stream *extension_author = need_author?(need_author->if_string):NULL;
|
|
|
|
inbuild_work *work = Works::new(extension_genre, extension_title, extension_author);
|
|
|
|
if (need_version) @<Add versioned extension@>
|
|
|
|
else @<Add unversioned extension@>;
|
|
|
|
|
|
|
|
@<Add versioned extension@> =
|
|
|
|
semantic_version_number V = VersionNumbers::from_text(need_version->if_string);
|
|
|
|
if (VersionNumbers::is_null(V)) {
|
|
|
|
TEMPORARY_TEXT(err)
|
|
|
|
WRITE_TO(err, "cannot read version number '%S'", need_version->if_string);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
} else {
|
|
|
|
inbuild_requirement *req = Requirements::new(work,
|
|
|
|
VersionNumberRanges::compatibility_range(V));
|
|
|
|
ADD_TO_LINKED_LIST(req, inbuild_requirement, E->extensions);
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Add unversioned extension@> =
|
|
|
|
inbuild_requirement *req = Requirements::any_version_of(work);
|
|
|
|
ADD_TO_LINKED_LIST(req, inbuild_requirement, E->extensions);
|
|
|
|
|
|
|
|
@<Deal with a kit dependency@> =
|
|
|
|
text_stream *kit_title = need_title->if_string;
|
|
|
|
text_stream *kit_author = need_author?(need_author->if_string):NULL;
|
|
|
|
inbuild_work *work = Works::new(kit_genre, kit_title, kit_author);
|
|
|
|
if (need_version) @<Add versioned kit@>
|
|
|
|
else @<Add unversioned kit@>;
|
|
|
|
|
|
|
|
@<Add versioned kit@> =
|
|
|
|
semantic_version_number V = VersionNumbers::from_text(need_version->if_string);
|
|
|
|
if (VersionNumbers::is_null(V)) {
|
|
|
|
TEMPORARY_TEXT(err)
|
|
|
|
WRITE_TO(err, "cannot read version number '%S'", need_version->if_string);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
} else {
|
|
|
|
inbuild_requirement *req = Requirements::new(work,
|
|
|
|
VersionNumberRanges::compatibility_range(V));
|
|
|
|
ADD_TO_LINKED_LIST(req, inbuild_requirement, E->kits);
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Add unversioned kit@> =
|
|
|
|
inbuild_requirement *req = Requirements::any_version_of(work);
|
|
|
|
ADD_TO_LINKED_LIST(req, inbuild_requirement, E->kits);
|
|
|
|
|
2023-03-22 07:45:28 +02:00
|
|
|
@ If the JSON file disagrees with the source of the extension about any one of the
version number, the title, or the author name, or indeed is simply absent, then we
need to detect that and either flag an error, or force a repair.
|
|
|
|
|
|
|
|
@<Check that the JSON metadata agrees@> =
|
2022-12-08 01:28:26 +02:00
|
|
|
semantic_version_number V2 = C->edition->version;
|
|
|
|
if (VersionNumbers::ne(V, V2)) {
|
2023-03-22 07:45:28 +02:00
|
|
|
if (repair_mode) {
|
|
|
|
force_JSON_write = I"the JSON file gives the wrong version number";
|
|
|
|
C->edition->version = V;
|
|
|
|
} else {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "the extension itself gives version number '%v', "
|
|
|
|
"but the metadata file says '%v': these need to match", &V, &V2);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (Str::ne(JSON_title, C->edition->work->title)) {
|
|
|
|
if (repair_mode) {
|
|
|
|
force_JSON_write = I"the JSON file gives the wrong title";
|
|
|
|
} else {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "the extension itself gives title '%S', "
|
|
|
|
"but the metadata file says '%S': these need to match",
|
|
|
|
C->edition->work->title, JSON_title);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (Str::ne(JSON_author_name, C->edition->work->author_name)) {
|
|
|
|
if (repair_mode) {
|
|
|
|
force_JSON_write = I"the JSON file gives the wrong author name";
|
|
|
|
} else {
|
|
|
|
TEMPORARY_TEXT(error_text)
|
|
|
|
WRITE_TO(error_text, "the extension itself gives author name '%S', "
|
|
|
|
"but the metadata file says '%S': these need to match",
|
|
|
|
C->edition->work->author_name, JSON_author_name);
|
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@ This is where incorrect or missing JSON metadata is repaired. If there was metadata
at all, we rewrite it with the correct author, title and version. If not, we create it
in a minimal sort of way, with just an |is| object.
|
|
|
|
|
|
|
|
@<Write a corrected JSON metadata file@> =
|
|
|
|
if (repair_mode == FALSE)
|
|
|
|
internal_error("should not try to write JSON except in repair mode");
|
|
|
|
if (C->location_if_path == NULL)
|
|
|
|
internal_error("should not try to write JSON except for a directory extension");
|
|
|
|
JSON_value *is_object = NULL;
|
|
|
|
@<Find or create the is-object@>;
|
|
|
|
@<Populate the is-object with correct values@>;
|
|
|
|
@<Write the JSON metadata back to the filing system@>;
|
|
|
|
|
|
|
|
@<Find or create the is-object@> =
|
|
|
|
if (C->metadata_record) is_object = JSON::look_up_object(C->metadata_record, I"is");
|
|
|
|
if (is_object == NULL) {
|
|
|
|
is_object = JSON::new_object();
|
|
|
|
C->metadata_record = JSON::new_object();
|
|
|
|
JSON::add_to_object(C->metadata_record, I"is", is_object);
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Populate the is-object with correct values@> =
|
|
|
|
JSON::change_object(is_object, I"type", JSON::new_string(I"extension"));
|
|
|
|
JSON::change_object(is_object, I"title", JSON::new_string(C->edition->work->title));
|
|
|
|
JSON::change_object(is_object, I"author", JSON::new_string(C->edition->work->author_name));
|
|
|
|
TEMPORARY_TEXT(v)
|
|
|
|
WRITE_TO(v, "%v", &(C->edition->version));
|
|
|
|
JSON::change_object(is_object, I"version", JSON::new_string(v));
|
|
|
|
DISCARD_TEXT(v)
|
|
|
|
|
|
|
|
@<Write the JSON metadata back to the filing system@> =
|
|
|
|
filename *F = Filenames::in(C->location_if_path, I"extension_metadata.json");
|
|
|
|
text_stream JSONF_struct;
|
|
|
|
text_stream *OUT = &JSONF_struct;
|
|
|
|
if (STREAM_OPEN_TO_FILE(OUT, F, UTF8_ENC) == FALSE) {
|
2022-12-08 01:28:26 +02:00
|
|
|
TEMPORARY_TEXT(error_text)
|
2023-03-22 07:45:28 +02:00
|
|
|
WRITE_TO(error_text, "extension metadata file 'extension_metadata.json' was missing "
|
|
|
|
"or incorrect, and I was unable to write a better one");
|
2022-12-08 01:28:26 +02:00
|
|
|
Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, error_text));
|
|
|
|
DISCARD_TEXT(error_text)
|
2023-03-22 07:45:28 +02:00
|
|
|
} else {
|
|
|
|
JSON::encode(OUT, C->metadata_record);
|
|
|
|
STREAM_CLOSE(OUT);
|
|
|
|
WRITE_TO(STDERR, "(Writing JSON metadata file to %f, because %S)\n", F, force_JSON_write);
|
2022-12-08 01:28:26 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@ Language elements can be activated or deactivated:

=
|
|
|
|
/* Record that extension |E| activates (|act| is TRUE) or deactivates
(|act| is FALSE) the language element called |name|. */
void Extensions::activation(inform_extension *E, text_stream *name, int act) {
	element_activation *EA = CREATE(element_activation);
	EA->element_name = Str::duplicate(name); /* copy: caller may reuse its stream */
	EA->activate = act;
	ADD_TO_LINKED_LIST(EA, element_activation, E->activations);
}
|
|
|
|
|
|
|
|
@ Since there are two ways extensions can be stored:

=
|
|
|
|
/* Return the |inform_extension| metadata for copy |C|, whether it is stored
as an extension bundle (directory) or a single-file extension; |NULL| if it
is neither. */
inform_extension *Extensions::from_copy(inbuild_copy *C) {
	inform_extension *ext = ExtensionBundleManager::from_copy(C);
	if (ext == NULL) ext = ExtensionManager::from_copy(C);
	return ext;
}
|
|
|
|
|
|
|
|
/* The conventional location of the extension's source: the copy's own file
for a single-file extension, or Source/Title.i7x inside a directory one. */
filename *Extensions::main_source_file(inbuild_copy *C) {
	filename *F = C->location_if_file;
	if (F == NULL) {
		pathname *P = C->location_if_path;
		if (P) {
			TEMPORARY_TEXT(leaf)
			WRITE_TO(leaf, "%S.i7x", C->edition->work->title);
			F = Filenames::in(Pathnames::down(P, I"Source"), leaf);
			DISCARD_TEXT(leaf)
		}
	}
	return F;
}
|
|
|
|
|
2023-03-22 07:45:28 +02:00
|
|
|
/* Fallback when the conventionally-named source file is absent: return the
first file with extension .i7x found in the Source subdirectory of |P|,
or |NULL| if there is none (or |P| is |NULL|). */
filename *Extensions::alternative_source_file(pathname *P) {
	if (P) {
		P = Pathnames::down(P, I"Source");
		linked_list *L = Directories::listing(P);
		filename *A = NULL;
		text_stream *entry;
		LOOP_OVER_LINKED_LIST(entry, text_stream, L) {
			if (Platform::is_folder_separator(Str::get_last_char(entry)) == FALSE) {
				filename *F = Filenames::in(P, entry);
				TEMPORARY_TEXT(fext)
				Filenames::write_extension(fext, F);
				if (Str::eq_insensitive(fext, I".i7x")) A = F;
				DISCARD_TEXT(fext)
				if (A) break;
			}
		}
		return A;
	}
	return NULL;
}
|
|
|
|
|
2023-01-31 00:15:25 +02:00
|
|
|
/* The Materials subdirectory of a directory-stored extension, or |NULL|
for a single-file extension (which cannot have one). */
pathname *Extensions::materials_path(inform_extension *E) {
	pathname *P = E->as_copy->location_if_path;
	if (P) P = Pathnames::down(P, I"Materials");
	return P;
}
|
|
|
|
|
2023-02-03 01:16:53 +02:00
|
|
|
/* Lazily create (and cache on |E|) a nest for the extension's Materials
directory; returns |NULL| if the extension has no materials path. */
inbuild_nest *Extensions::materials_nest(inform_extension *E) {
	pathname *P = Extensions::materials_path(E);
	if ((E->materials_nest == NULL) && (P)) {
		E->materials_nest = Nests::new(P);
		Nests::set_tag(E->materials_nest, EXTENSION_NEST_TAG);
	}
	return E->materials_nest;
}
|
|
|
|
|
2020-05-09 18:25:04 +03:00
|
|
|
@h Cached metadata.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Extensions::set_word_count(inform_extension *E, int wc) {
	/* Record the extension's word count; used below for length sorting. */
	E->word_count = wc;
}
|
|
|
|
|
|
|
|
int Extensions::get_word_count(inform_extension *E) {
	/* The word count as recorded by Extensions::set_word_count. */
	return E->word_count;
}
|
|
|
|
|
|
|
|
text_stream *Extensions::get_sort_word_count(inform_extension *E) {
	/* The word count rendered as a fixed-width 8-character decimal, so that
	plain textual comparison of these strings sorts numerically. */
	text_stream *padded = Str::new();
	WRITE_TO(padded, "%8d", E->word_count);
	return padded;
}
|
|
|
|
|
|
|
|
int Extensions::compare_by_edition(inform_extension *E1, inform_extension *E2) {
	/* Sort comparator: by work first, then by version number. */
	if ((E1 == NULL) || (E2 == NULL)) internal_error("bad work match");
	int delta = Works::cmp(E1->as_copy->edition->work, E2->as_copy->edition->work);
	if (delta) return delta;
	return VersionNumbers::cmp(E1->as_copy->edition->version, E2->as_copy->edition->version);
}
|
|
|
|
|
|
|
|
int Extensions::compare_by_author(inform_extension *E1, inform_extension *E2) {
	/* Sort comparator: by author name (note the reversed operand order, as in
	the original), with ties broken by Extensions::compare_by_edition. */
	if ((E1 == NULL) || (E2 == NULL)) internal_error("bad work match");
	int delta = Str::cmp(E2->as_copy->edition->work->author_name,
		E1->as_copy->edition->work->author_name);
	return (delta != 0) ? delta : Extensions::compare_by_edition(E1, E2);
}
|
|
|
|
|
|
|
|
int Extensions::compare_by_title(inform_extension *E1, inform_extension *E2) {
	/* Sort comparator: by title (reversed operand order, as in the original),
	with ties broken by Extensions::compare_by_edition. */
	if ((E1 == NULL) || (E2 == NULL)) internal_error("bad work match");
	int delta = Str::cmp(E2->as_copy->edition->work->title,
		E1->as_copy->edition->work->title);
	return (delta != 0) ? delta : Extensions::compare_by_edition(E1, E2);
}
|
|
|
|
|
|
|
|
int Extensions::compare_by_length(inform_extension *E1, inform_extension *E2) {
	/* Sort comparator: by padded word count (reversed operand order, as in
	the original), with ties broken by Extensions::compare_by_edition. */
	if ((E1 == NULL) || (E2 == NULL)) internal_error("bad work match");
	int delta = Str::cmp(
		Extensions::get_sort_word_count(E2), Extensions::get_sort_word_count(E1));
	return (delta != 0) ? delta : Extensions::compare_by_edition(E1, E2);
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Search list.
|
|
|
|
Sometimes an extension is being looked at in isolation, and then |read_into_project|
|
|
|
|
will be |NULL|; but if it is being loaded to be included in the source text of a
|
|
|
|
project, then...
|
|
|
|
|
|
|
|
=
|
|
|
|
void Extensions::set_associated_project(inform_extension *E, inform_project *P) {
	/* Note which project, if any, this extension is being read into; this
	affects the extension's search list (see Extensions::nest_list). */
	E->read_into_project = P;
}
|
|
|
|
|
|
|
|
@ ...and this affects its search list, because now its own inclusions can see
|
|
|
|
the Materials folder of the project in question:
|
|
|
|
|
|
|
|
=
|
|
|
|
linked_list *Extensions::nest_list(inform_extension *E) {
	/* The list of nests searched when resolving inclusions made by this
	extension: the associated project's materials nest (if any) followed by
	the shared nest list. Built lazily and cached in |E->search_list|. */
	if (E == NULL) return Supervisor::shared_nest_list();
	RUN_ONLY_FROM_PHASE(NESTED_INBUILD_PHASE)
	if (LinkedLists::len(E->search_list) == 0) {
		/* the project's Materials nest, if any, takes priority */
		inform_project *proj = E->read_into_project;
		if (proj) ADD_TO_LINKED_LIST(proj->materials_nest, inbuild_nest, E->search_list);
		inbuild_nest *N;
		linked_list *L = Supervisor::shared_nest_list();
		LOOP_OVER_LINKED_LIST(N, inbuild_nest, L)
			ADD_TO_LINKED_LIST(N, inbuild_nest, E->search_list);
	}
	return E->search_list;
}
|
|
|
|
|
2022-12-08 01:28:26 +02:00
|
|
|
@h Language element activation.
|
|
|
|
Note that this function is meaningful only when this module is part of the
|
|
|
|
|inform7| executable, and it invites us to activate or deactivate language
|
|
|
|
features as |E| would like.
|
|
|
|
|
|
|
|
=
|
2022-12-11 01:50:28 +02:00
|
|
|
void Extensions::activate_elements(inform_extension *E, inform_project *proj) {
	/* Carry out the feature activations/deactivations requested by the
	extension's metadata, attaching copy errors for unknown features or for
	attempts to switch off features which refuse deactivation; then verify
	that each kit the metadata claims to contain can actually be found in
	the extension's own materials nest, adding it as a dependency of |proj|. */
	element_activation *EA;
	LOOP_OVER_LINKED_LIST(EA, element_activation, E->activations) {
		compiler_feature *P = Features::from_name(EA->element_name);
		if (P == NULL) {
			TEMPORARY_TEXT(err)
			WRITE_TO(err, "extension metadata refers to unknown compiler feature '%S'", EA->element_name);
			Copies::attach_error(E->as_copy, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
			DISCARD_TEXT(err)
		} else {
			if (EA->activate) Features::activate(P);
			else if (Features::deactivate(P) == FALSE) {
				/* Features::deactivate returning FALSE means the feature is mandatory */
				TEMPORARY_TEXT(err)
				WRITE_TO(err, "extension metadata asks to deactivate mandatory compiler feature '%S'",
					EA->element_name);
				Copies::attach_error(E->as_copy, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
				DISCARD_TEXT(err)
			}
		}
	}
	/* claimed kits are sought only in the extension's own materials nest */
	linked_list *L = NEW_LINKED_LIST(inbuild_nest);
	inbuild_nest *N = Extensions::materials_nest(E);
	ADD_TO_LINKED_LIST(N, inbuild_nest, L);
	inbuild_requirement *req;
	LOOP_OVER_LINKED_LIST(req, inbuild_requirement, E->kits) {
		if (Projects::add_kit_dependency(proj,
			req->work->raw_title, NULL, NULL, NULL, L) == FALSE) {
			TEMPORARY_TEXT(err)
			WRITE_TO(err,
				"extension metadata says that the extension contains the kit '%S', but it doesn't",
				req->work->raw_title);
			Copies::attach_error(E->as_copy, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
			DISCARD_TEXT(err)
		}
	}
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Graph.
|
2020-05-06 01:52:20 +03:00
|
|
|
The dependency graph is not so much constructed as discovered; dependencies
|
|
|
|
are made to each other extension as it's Included in this one, during the
|
|
|
|
course of reading in the text.
|
|
|
|
|
|
|
|
Note that this function is not called when graphing a project which Includes
|
|
|
|
this extension: this is called only when //inbuild// wants to see the graph
|
|
|
|
of an extension in isolation from projects. (That's why we must perform the
|
|
|
|
Inclusion traverse: for a project this traverse would come later, but with
|
|
|
|
no project involved, we must take action ourselves.)
|
2020-05-05 23:59:02 +03:00
|
|
|
|
|
|
|
=
|
|
|
|
void Extensions::construct_graph(inform_extension *E) {
	/* Discover the dependency graph of E when inbuild examines it in
	isolation: this runs only when no project was set at the command line,
	since a project's own graphing would otherwise perform the Inclusion
	traverse later (see the explanation above). */
	if (Supervisor::project_set_at_command_line() == NULL) {
		Copies::get_source_text(E->as_copy, I"graphing extension");
		Sentences::set_start_of_source(sfsm, -1);
		Inclusions::traverse(E->as_copy, E->syntax_tree);
		/* kits claimed by the metadata are sought in the extension's own
		materials nest, with a graph edge added for each one found */
		linked_list *L = NEW_LINKED_LIST(inbuild_nest);
		inbuild_nest *N = Extensions::materials_nest(E);
		ADD_TO_LINKED_LIST(N, inbuild_nest, L);
		inbuild_requirement *req;
		LOOP_OVER_LINKED_LIST(req, inbuild_requirement, E->kits) {
			inform_kit *K = Kits::find_by_name(req->work->raw_title, L, NULL);
			if (K) Graphs::need_this_to_use(E->as_copy->vertex, K->as_copy->vertex);
		}
	}
}
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
@h Read source text.
|
|
|
|
The scan only skimmed the surface of the file, and didn't try to parse it as
|
|
|
|
natural language text with Preform. But if the extension turns out to be one
|
|
|
|
that we need to use for something, we'll need to read its full text eventually.
|
|
|
|
This is that time.
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
At present all extensions are assumed to have English as the language of syntax.
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
=
|
|
|
|
void Extensions::read_source_text_for(inform_extension *E) {
	/* Read the full source text of E and break it into sentences. All
	extensions are assumed to have English as their language of syntax. */
	inform_language *L = Languages::find_for(I"English", Extensions::nest_list(E));
	Languages::read_Preform_definition(L, Extensions::nest_list(E));
	filename *F = Extensions::main_source_file(E->as_copy);
	TEMPORARY_TEXT(synopsis)
	@<Concoct a synopsis for the extension to be read@>;
	E->read_into_file = SourceText::read_file(E->as_copy, F, synopsis, FALSE);
	SVEXPLAIN(1, "(from %f)\n", F);
	DISCARD_TEXT(synopsis)
	if (E->read_into_file) {
		/* documentation, if present, is torn off the tail of the source text */
		text_stream *doc = TextFromFiles::torn_off_documentation(E->read_into_file);
		if (Str::len(doc) > 0)
			E->documentation = DocumentationCompiler::compile_from_text(doc, E, NULL);
		else E->documentation = NULL;
		/* back-pointer so the source file can be traced to this copy later
		(see Extensions::corresponding_to) */
		E->read_into_file->your_ref = STORE_POINTER_inbuild_copy(E->as_copy);
		@<Break the text into sentences@>;
		E->body_text_unbroken = FALSE;
	}
}
|
|
|
|
|
|
|
|
@ We concoct a textual synopsis in the form
|
2020-04-08 01:02:44 +03:00
|
|
|
= (text)
|
|
|
|
"Pantomime Sausages by Mr Punch"
|
|
|
|
=
|
2020-03-31 02:17:21 +03:00
|
|
|
to be used by |SourceFiles::read_extension_source_text| for printing to |stdout|. Since
|
|
|
|
we dare not assume |stdout| can manage characters outside the basic ASCII
|
|
|
|
range, we flatten them from general ISO to plain ASCII.
|
|
|
|
|
|
|
|
@<Concoct a synopsis for the extension to be read@> =
	/* e.g. "version 2 of Pantomime Sausages by Mr Punch"; the version prefix
	is omitted when the edition carries no version number */
	if (VersionNumbers::is_null(E->as_copy->edition->version) == FALSE)
		WRITE_TO(synopsis, "version %v of ", &(E->as_copy->edition->version));
	WRITE_TO(synopsis, "%S by %S",
		E->as_copy->edition->work->title,
		E->as_copy->edition->work->author_name);
	/* flatten exotic characters to plain ASCII, since this text may be
	printed to stdout */
	LOOP_THROUGH_TEXT(pos, synopsis)
		Str::put(pos,
			Characters::make_filename_safe(
				Str::get(pos)));
|
2020-03-31 02:17:21 +03:00
|
|
|
|
|
|
|
@ Note that if there is an active project, then we are reading the extension
|
|
|
|
in order to include it in that, and so we send it to the project's syntax tree,
|
|
|
|
rather than to the extension's own one. But if we are simply examining the
|
|
|
|
extension by running |-graph| on it in the Inbuild command line, for example,
|
|
|
|
then its sentences will go to the extension's own tree.
|
|
|
|
|
|
|
|
@<Break the text into sentences@> =
	wording EXW = E->read_into_file->text_read;
	E->body_text = EXW;
	E->body_text_unbroken = TRUE; /* mark this to be sentence-broken */
	/* if a project is including this extension, its sentences go into the
	project's syntax tree rather than the extension's own one */
	inform_project *project = E->read_into_project;
	if (project) E->syntax_tree = project->syntax_tree;
	Sentences::break_into_extension_copy(E->syntax_tree,
		E->body_text, E->as_copy, project);
	E->body_text_unbroken = FALSE;
|
|
|
|
|
2023-07-30 21:07:54 +03:00
|
|
|
@ And here's the top line:
|
|
|
|
|
|
|
|
=
|
|
|
|
source_location Extensions::top_line_location(inform_extension *E) {
	/* A source location pointing to line 1 of the file E was read from. */
	source_location loc;
	loc.line_number = 1;
	loc.file_of_origin = E->read_into_file;
	return loc;
}
|
|
|
|
|
2023-04-11 00:54:14 +03:00
|
|
|
@ In directory extensions, documentation can be stored separately:
|
|
|
|
|
|
|
|
=
|
2023-09-12 12:51:37 +03:00
|
|
|
compiled_documentation *Extensions::get_documentation(inform_extension *E,
	filename *sitemap) {
	/* Return the compiled documentation for E, or NULL if there is none.
	Documentation can come either from the tail of the source text (torn off
	when the source is read) or, for a directory extension, from a
	stand-alone Documentation subdirectory; the directory search is
	performed at most once, guarded by |documentation_sought|. */
	if (E == NULL) return NULL;
	int was = SourceText::for_documentation_only(TRUE);
	Copies::get_source_text(E->as_copy, I"getting documentation"); /* in the unlikely event this has not happened yet */
	SourceText::for_documentation_only(was);
	if (E->documentation_sought == FALSE) {
		if (E->as_copy->location_if_path) {
			pathname *D = Pathnames::down(E->as_copy->location_if_path, I"Documentation");
			if (Directories::exists(D)) @<Fetch wording from stand-alone directory@>;
		}
		E->documentation_sought = TRUE;
	}
	return E->documentation;
}
|
|
|
|
|
2023-07-21 13:49:36 +03:00
|
|
|
@<Fetch wording from stand-alone directory@> =
	if (E->documentation) {
		/* documentation was already torn off the source text, so having a
		Documentation directory as well is a metadata error */
		TEMPORARY_TEXT(error_text)
		WRITE_TO(error_text,
			"this extension provides documentation both as a file and in its source");
		Copies::attach_error(E->as_copy, CopyErrors::new_T(EXT_MISWORDED_CE, -1, error_text));
		DISCARD_TEXT(error_text)
	} else {
		E->documentation = DocumentationCompiler::compile_from_path(D, E, sitemap);
	}
|
2023-04-11 00:54:14 +03:00
|
|
|
|
2023-07-20 01:46:39 +03:00
|
|
|
@ And this serves the |-document| feature of inbuild:
|
|
|
|
|
|
|
|
=
|
2023-09-12 12:51:37 +03:00
|
|
|
void Extensions::document(inform_extension *E, pathname *dest, filename *sitemap) {
	/* Serve the -document feature of inbuild: render E's documentation as
	HTML into the directory |dest|. */
	SVEXPLAIN(1, "(documenting %X to %p)\n", E->as_copy->edition->work, dest);
	DocumentationRenderer::as_HTML(dest, Extensions::get_documentation(E, sitemap), NULL, NULL);
}
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
@ When the extension source text was read from its |source_file|, we
|
|
|
|
attached a reference to say which |inform_extension| it was, and here we
|
|
|
|
make use of that:
|
|
|
|
|
|
|
|
=
|
|
|
|
inform_extension *Extensions::corresponding_to(source_file *sf) {
	/* The extension which was read in from source file |sf|, if any: this
	follows the |your_ref| back-pointer attached when the file was read. */
	if (sf == NULL) return NULL;
	inbuild_copy *C = RETRIEVE_POINTER_inbuild_copy(sf->your_ref);
	if (C == NULL) return NULL;
	if ((C->edition->work->genre == extension_genre) ||
		(C->edition->work->genre == extension_bundle_genre))
		return Extensions::from_copy(C);
	return NULL;
}
|
|
|
|
|
|
|
|
@h Miscellaneous.
|
|
|
|
|
|
|
|
=
|
2020-02-17 02:16:38 +02:00
|
|
|
void Extensions::write(OUTPUT_STREAM, inform_extension *E) {
	/* Write a brief identification of E, or "none" for the null extension. */
	if (E) WRITE("%X", E->as_copy->edition->work);
	else WRITE("none");
}
|
|
|
|
|
|
|
|
void Extensions::write_name_to_file(inform_extension *E, OUTPUT_STREAM) {
	/* Print the extension's title in its raw, as-written form. */
	WRITE("%S", E->as_copy->edition->work->raw_title);
}
|
|
|
|
|
|
|
|
void Extensions::write_author_to_file(inform_extension *E, OUTPUT_STREAM) {
	/* Print the extension's author name in its raw, as-written form. */
	WRITE("%S", E->as_copy->edition->work->raw_author_name);
}
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
@ Three pieces of information will be set later on, by other parts of Inform
|
|
|
|
calling the routines below.
|
2020-02-17 02:16:38 +02:00
|
|
|
|
|
|
|
The rubric text for an extension, which is double-quoted matter just below
|
|
|
|
its "begins here" line, is parsed as a sentence and will be read as an
|
|
|
|
assertion in the usual way when the material from this extension is being
|
|
|
|
worked through (quite a long time after the EF structure was created). When
|
2020-05-09 14:10:43 +03:00
|
|
|
that happens, the following function will be called to set the rubric.
|
2020-02-17 02:16:38 +02:00
|
|
|
|
|
|
|
=
|
|
|
|
void Extensions::set_rubric(inform_extension *E, text_stream *text) {
	/* Record the extension's rubric: the brief double-quoted description
	just below its "begins here" line (see the explanation above). */
	if (E == NULL) internal_error("no extension");
	E->rubric_as_lexed = Str::duplicate(text);
	LOGIF(EXTENSIONS_CENSUS, "Extension rubric: %S\n", E->rubric_as_lexed);
}
|
|
|
|
|
2020-02-19 22:48:30 +02:00
|
|
|
text_stream *Extensions::get_rubric(inform_extension *E) {
	/* The rubric recorded by Extensions::set_rubric, or NULL if none. */
	return (E) ? (E->rubric_as_lexed) : NULL;
}
|
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@ The optional extra credit line is used to acknowledge I6 sources,
|
|
|
|
collaborators, translators and so on.
|
|
|
|
|
|
|
|
=
|
2020-02-17 02:16:38 +02:00
|
|
|
void Extensions::set_extra_credit(inform_extension *E, text_stream *text) {
	/* Record the optional extra credit line, acknowledging I6 sources,
	collaborators, translators and so on. */
	if (E == NULL) internal_error("no extension");
	E->extra_credit_as_lexed = Str::duplicate(text);
	LOGIF(EXTENSIONS_CENSUS, "Extension extra credit: %S\n", E->extra_credit_as_lexed);
}
|
|
|
|
|
|
|
|
@ The use option "authorial modesty" is unusual in applying to the extension
|
|
|
|
it is found in, not the whole source text. When we read it, we call one of
|
|
|
|
the following routines, depending on whether it was in an extension or in
|
|
|
|
the main source text:
|
|
|
|
|
|
|
|
=
|
|
|
|
int general_authorial_modesty = FALSE; /* set if the option occurred in the main source text */
void Extensions::set_authorial_modesty(inform_extension *E) {
	/* Called when the "authorial modesty" use option occurs inside E itself,
	which suppresses crediting E in the compiled game. */
	if (E == NULL) internal_error("no extension");
	E->authorial_modesty = TRUE;
}
|
2020-03-31 02:17:21 +03:00
|
|
|
void Extensions::set_general_authorial_modesty(void) {
	/* Called when the "authorial modesty" use option occurs in the main
	source text, rather than in any single extension. */
	general_authorial_modesty = TRUE;
}
|
2020-02-17 02:16:38 +02:00
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@ The inclusion sentence for an extension is where it was Included in a
|
|
|
|
project's syntax tree (if it was). It isn't used in compilation, only for
|
|
|
|
problem messages and the index.
|
|
|
|
|
|
|
|
=
|
2020-02-17 00:01:50 +02:00
|
|
|
void Extensions::set_inclusion_sentence(inform_extension *E, parse_node *N) {
	/* Record where a project's source text Included this extension: used
	only for problem messages and the index, not for compilation. */
	E->inclusion_sentence = N;
}
|
2020-02-17 02:16:38 +02:00
|
|
|
parse_node *Extensions::get_inclusion_sentence(inform_extension *E) {
	/* The sentence which Included this extension, or NULL if none. */
	return (E) ? (E->inclusion_sentence) : NULL;
}
|
2020-02-17 00:01:50 +02:00
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@ An extension is "standard" if it's either the Standard Rules or Basic Inform.
|
|
|
|
|
|
|
|
=
|
2020-02-17 00:01:50 +02:00
|
|
|
int Extensions::is_standard(inform_extension *E) {
	/* TRUE only for the Standard Rules and Basic Inform. */
	return (E) ? (E->standard) : FALSE;
}
|
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@h Version requirements.
|
|
|
|
When it's known that an extension must satisfy a given version requirement --
|
|
|
|
say, being version 7.2.1 or better -- the following is called. Note that
|
|
|
|
if incompatible requirements are placed on it, the range in |E->must_satisfy|
|
|
|
|
becomes empty and stays that way.
|
2020-02-17 00:01:50 +02:00
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
=
|
2020-02-17 02:16:38 +02:00
|
|
|
void Extensions::must_satisfy(inform_extension *E, inbuild_requirement *req) {
	/* Impose a version requirement on E; successive requirements are
	intersected, so incompatible ones leave an empty range for good. */
	if (E->must_satisfy)
		VersionNumberRanges::intersect_range(E->must_satisfy->version_range, req->version_range);
	else
		E->must_satisfy = req;
}
|
2020-02-17 00:01:50 +02:00
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@ And it is certainly possible, if an extension is loaded for multiple
|
|
|
|
reasons with different versioning needs, that the extension no longer meets
|
|
|
|
its requirements (even though it did when first loaded). This tests for that:
|
|
|
|
|
|
|
|
=
|
2020-02-17 02:16:38 +02:00
|
|
|
int Extensions::satisfies(inform_extension *E) {
	/* Does E still meet every version requirement imposed on it so far? */
	return (E) ? (Requirements::meets(E->as_copy->edition, E->must_satisfy)) : FALSE;
}
|
2023-02-09 00:08:44 +02:00
|
|
|
|
|
|
|
@h File hierarchy tidying.
|
|
|
|
|
|
|
|
=
|
|
|
|
int Extensions::rename_directory(pathname *P, text_stream *new_name) {
	/* Rename the directory P to |new_name|, reporting the change on stdout
	only if it succeeds; returns the nonzero success code from the rename. */
	TEMPORARY_TEXT(task)
	WRITE_TO(task, "(Changing directory name '%p' to '%S')\n", P, new_name);
	int rv = Directories::rename(P, new_name);
	if (rv) WRITE_TO(STDOUT, "%S", task);
	DISCARD_TEXT(task) /* was missing: the temporary stream leaked on every call */
	return rv;
}
|
|
|
|
|
|
|
|
int Extensions::rename_file(filename *F, text_stream *new_name) {
	/* Rename the file F to |new_name|, reporting the change on stdout only
	if it succeeds; returns the nonzero success code from the rename. */
	TEMPORARY_TEXT(task)
	WRITE_TO(task, "(Changing file name '%f' to '%S')\n", F, new_name);
	int rv = Filenames::rename(F, new_name);
	if (rv) WRITE_TO(STDOUT, "%S", task);
	DISCARD_TEXT(task) /* was missing: the temporary stream leaked on every call */
	return rv;
}
|
2023-07-28 02:43:38 +03:00
|
|
|
|
|
|
|
@h Modernisation.
|
|
|
|
|
|
|
|
=
|
|
|
|
int Extensions::modernise(inform_extension *E, text_stream *OUT) {
	/* Convert a single-file extension to the directory ("bundle") format,
	reporting to OUT; a bundle needs no conversion. Always returns TRUE. */
	if (E->as_copy->edition->work->genre != extension_bundle_genre)
		ExtensionConverter::go(E, OUT);
	else
		WRITE("already in directory format\n");
	return TRUE;
}
|