2020-02-09 01:28:32 +02:00
|
|
|
[Projects::] Project Services.
|
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
Behaviour specific to copies of either the projectbundle or projectfile genres.
|
2020-02-09 01:28:32 +02:00
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
@h Scanning metadata.
|
2022-09-17 21:49:46 +03:00
|
|
|
Metadata for projects is stored in the following structure.
|
2020-03-09 14:44:59 +02:00
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
=
|
2020-02-09 01:28:32 +02:00
|
|
|
typedef struct inform_project {
	struct inbuild_copy *as_copy; /* the copy (of either project genre) which this annotates */
	int stand_alone; /* rather than being in a .inform project bundle */
	struct inbuild_nest *materials_nest; /* the project's .materials directory, seen as a nest */
	struct linked_list *search_list; /* of |inbuild_nest| */
	struct filename *primary_source; /* primary source file, if set at the command line */
	struct filename *primary_output; /* NOTE(review): never initialised in |Projects::scan| — confirm it is set elsewhere before use */
	struct semantic_version_number version; /* as given in the JSON metadata, if any */
	struct linked_list *source_vertices; /* of |build_vertex| */
	struct linked_list *kit_names_to_include; /* of |JSON_value| */
	struct linked_list *kits_to_include; /* of |kit_dependency| */
	struct text_stream *name_of_language_of_play; /* defaults to English */
	struct inform_language *language_of_play; /* resolved from the name, once nests are known */
	struct text_stream *name_of_language_of_syntax; /* defaults to English */
	struct inform_language *language_of_syntax;
	struct text_stream *name_of_language_of_index; /* empty means: same as language of syntax */
	struct inform_language *language_of_index;
	struct build_vertex *unblorbed_vertex; /* build graph vertex for the bare story file */
	struct build_vertex *blorbed_vertex; /* build graph vertex for the blorb packaging */
	struct build_vertex *chosen_build_target; /* which of the above we are building towards */
	struct parse_node_tree *syntax_tree; /* syntax tree for the project's source text */
	struct linked_list *extensions_included; /* of |inform_extension| */
	struct linked_list *activations; /* of |element_activation| */
	int fix_rng; /* seed value for the RNG at run-time, or 0 for no fixing */
	int compile_for_release;
	int compile_only;
	CLASS_DEFINITION
} inform_project;
|
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
@ This is called as soon as a new copy |C| of the language genre is created.
|
|
|
|
It doesn't actually do any scanning to speak of, in fact: we may eventually
|
|
|
|
learn a lot about the project, but for now we simply initialise to bland
|
|
|
|
placeholders.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::scan(inbuild_copy *C) {
	inform_project *proj = CREATE(inform_project);
	proj->as_copy = C;
	if (C == NULL) internal_error("no copy to scan");
	Copies::set_metadata(C, STORE_POINTER_inform_project(proj));
	/* Bland placeholder values; richer knowledge comes later */
	proj->stand_alone = FALSE;
	proj->version = VersionNumbers::null();
	proj->source_vertices = NEW_LINKED_LIST(build_vertex);
	proj->kit_names_to_include = NEW_LINKED_LIST(JSON_value);
	proj->kits_to_include = NEW_LINKED_LIST(kit_dependency);
	proj->name_of_language_of_play = I"English";
	proj->language_of_play = NULL;
	proj->name_of_language_of_syntax = I"English";
	proj->language_of_syntax = NULL;
	proj->name_of_language_of_index = NULL;
	proj->language_of_index = NULL;
	proj->chosen_build_target = NULL;
	proj->unblorbed_vertex = NULL;
	proj->blorbed_vertex = NULL;
	proj->fix_rng = 0;
	proj->compile_for_release = FALSE;
	proj->compile_only = FALSE;
	proj->syntax_tree = SyntaxTree::new();
	/* The materials directory sits alongside the project: derive its path from
	either the bundle directory name or the loose source file's leafname */
	pathname *P = Projects::path(proj), *M;
	if (proj->as_copy->location_if_path)
		M = Projects::materialise_pathname(
			Pathnames::up(P), Pathnames::directory_name(P));
	else
		M = Projects::materialise_pathname(
			P, Filenames::get_leafname(proj->as_copy->location_if_file));
	proj->materials_nest = Supervisor::add_nest(M, MATERIALS_NEST_TAG);
	proj->search_list = NEW_LINKED_LIST(inbuild_nest);
	proj->primary_source = NULL;
	proj->extensions_included = NEW_LINKED_LIST(inform_extension);
	proj->activations = NEW_LINKED_LIST(element_activation);
	Projects::scan_bibliographic_data(proj);
	/* Optionally, a JSON metadata file in the materials directory can supply
	a version number, feature activations, project details and dependencies */
	filename *F = Filenames::in(M, I"project_metadata.json");
	if (TextFiles::exists(F)) {
		JSONMetadata::read_metadata_file(C, F, NULL, NULL);
		if (C->metadata_record) {
			JSON_value *is = JSON::look_up_object(C->metadata_record, I"is");
			if (is) {
				JSON_value *version = JSON::look_up_object(is, I"version");
				if (version) {
					proj->version = VersionNumbers::from_text(version->if_string);
				}
			}
			@<Extract activations@>;
			JSON_value *project_details =
				JSON::look_up_object(C->metadata_record, I"project-details");
			if (project_details) {
				@<Extract the project details@>;
			}
			JSON_value *needs = JSON::look_up_object(C->metadata_record, I"needs");
			if (needs) {
				JSON_value *E;
				LOOP_OVER_LINKED_LIST(E, JSON_value, needs->if_list)
					@<Extract this requirement@>;
			}
		}
	} else {
		SVEXPLAIN(2, "(no JSON metadata file found at %f)\n", F);
	}
}
|
|
|
|
|
2022-09-01 02:14:18 +03:00
|
|
|
@<Extract activations@> =
	/* "activates" and "deactivates" are arrays of feature names, each passed
	on to //Projects::activation// with the appropriate sense */
	JSON_value *activates = JSON::look_up_object(C->metadata_record, I"activates");
	if (activates) {
		JSON_value *E;
		LOOP_OVER_LINKED_LIST(E, JSON_value, activates->if_list)
			Projects::activation(proj, E->if_string, TRUE);
	}
	JSON_value *deactivates = JSON::look_up_object(C->metadata_record, I"deactivates");
	if (deactivates) {
		JSON_value *E;
		LOOP_OVER_LINKED_LIST(E, JSON_value, deactivates->if_list)
			Projects::activation(proj, E->if_string, FALSE);
	}

@<Extract the project details@> =
	/* Nothing is extracted from "project-details" at present */
	;

@<Extract this requirement@> =
	/* A project's needs must be unconditional: "if"/"unless" clauses are rejected */
	JSON_value *if_clause = JSON::look_up_object(E, I"if");
	JSON_value *unless_clause = JSON::look_up_object(E, I"unless");
	if ((if_clause) || (unless_clause)) {
		TEMPORARY_TEXT(err)
		WRITE_TO(err, "a project's needs must be unconditional");
		Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
		DISCARD_TEXT(err)
	}
	JSON_value *need_clause = JSON::look_up_object(E, I"need");
	if (need_clause) {
		/* NOTE(review): |need_type| is dereferenced below without a NULL check —
		presumably the JSON schema validation guarantees "type" is present; confirm */
		JSON_value *need_type = JSON::look_up_object(need_clause, I"type");
		JSON_value *need_version_range = JSON::look_up_object(need_clause, I"version-range");
		if (need_version_range) {
			TEMPORARY_TEXT(err)
			WRITE_TO(err, "version ranges on project dependencies are not yet implemented");
			Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
			DISCARD_TEXT(err)
		}
		if (Str::eq(need_type->if_string, I"kit")) {
			/* Kit needs are stored whole, and resolved later by
			//Projects::finalise_kit_dependencies// */
			ADD_TO_LINKED_LIST(need_clause, JSON_value, proj->kit_names_to_include);
		} else if (Str::eq(need_type->if_string, I"extension")) {
			;
		} else {
			TEMPORARY_TEXT(err)
			WRITE_TO(err, "a project can only have kits or extensions as dependencies");
			Copies::attach_error(C, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
			DISCARD_TEXT(err)
		}
	}
|
|
|
|
|
2022-09-01 02:14:18 +03:00
|
|
|
@ Language elements can similarly be activated or deactivated, though the
|
|
|
|
latter may not be useful in practice:
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Record that the named language element should be switched on (|act| is |TRUE|)
or off (|FALSE|) for this project; acted on later by //Projects::activate_elements//. */
void Projects::activation(inform_project *proj, text_stream *name, int act) {
	element_activation *activation = CREATE(element_activation);
	activation->activate = act;
	activation->element_name = Str::duplicate(name);
	ADD_TO_LINKED_LIST(activation, element_activation, proj->activations);
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The materials folder sits alongside the project and has the same name,
|
|
|
|
but ending |.materials| instead of |.inform|.
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Form the pathname of the materials directory corresponding to |leaf|, inside
|in|: the filename extension (".inform", typically) is replaced by ".materials".
A leaf with no dot, or a dot only at position 0, is used unchanged. */
pathname *Projects::materialise_pathname(pathname *in, text_stream *leaf) {
	TEMPORARY_TEXT(mat_leaf)
	WRITE_TO(mat_leaf, "%S", leaf);
	int dot_pos = Str::len(mat_leaf) - 1;
	while ((dot_pos > 0) && (Str::get_at(mat_leaf, dot_pos) != '.'))
		dot_pos--;
	if (dot_pos > 0) {
		Str::truncate(mat_leaf, dot_pos);
		WRITE_TO(mat_leaf, ".materials");
	}
	pathname *result = Pathnames::down(in, mat_leaf);
	DISCARD_TEXT(mat_leaf)
	return result;
}
|
2020-02-11 02:15:49 +02:00
|
|
|
|
2021-09-12 15:21:13 +03:00
|
|
|
@ Returns |TRUE| for a project arising from a single file, |FALSE| for a
|
|
|
|
project in a |.inform| bundle. (Within the UI apps, then, all projects return
|
|
|
|
|FALSE| here; it's only command-line use of Inform which involves stand-alone files.)
|
|
|
|
|
|
|
|
=
|
|
|
|
/* |TRUE| for a project arising from a single loose file, |FALSE| for one in
a .inform bundle (and also for |NULL|). */
int Projects::stand_alone(inform_project *proj) {
	return (proj)?(proj->stand_alone):FALSE;
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The file-system path to the project. For a "bundle" made by the Inform GUI
|
|
|
|
apps, the bundle itself is a directory (even if this is concealed from the
|
|
|
|
user on macOS) and the following returns that path. For a loose file of
|
|
|
|
Inform source text, it's the directory in which the file is found. (This is
|
|
|
|
a 2020 change of policy: previously it was the CWD. The practical difference
|
|
|
|
is small, but one likes to minimise the effect of the CWD.)
|
2020-05-05 01:34:55 +03:00
|
|
|
|
|
|
|
=
|
2020-05-05 23:59:02 +03:00
|
|
|
/* The file-system path to the project: the bundle directory itself, or, for
a loose source file, the directory containing that file. */
pathname *Projects::path(inform_project *proj) {
	if (proj == NULL) return NULL;
	inbuild_copy *C = proj->as_copy;
	if (C->location_if_path) return C->location_if_path;
	return Filenames::up(C->location_if_file);
}
|
|
|
|
|
|
|
|
/* The |Build| subdirectory of the project. */
pathname *Projects::build_path(inform_project *proj) {
	pathname *home = Projects::path(proj);
	return Pathnames::down(home, I"Build");
}
|
|
|
|
|
|
|
|
/* The materials directory of the project, regarded as a nest. */
inbuild_nest *Projects::materials_nest(inform_project *proj) {
	return (proj)?(proj->materials_nest):NULL;
}
|
|
|
|
|
|
|
|
/* The materials directory of the project, as a bare pathname. */
pathname *Projects::materials_path(inform_project *proj) {
	if (proj == NULL) return NULL;
	inbuild_nest *N = proj->materials_nest;
	return N->location;
}
|
|
|
|
|
|
|
|
@ Each project has its own search list of nests, but this always consists of,
|
|
|
|
first, its own Materials nest, and then the shared search list. For timing
|
|
|
|
reasons, this list is created on demand.
|
|
|
|
|
|
|
|
=
|
|
|
|
linked_list *Projects::nest_list(inform_project *proj) {
	if (proj == NULL) return Supervisor::shared_nest_list();
	RUN_ONLY_FROM_PHASE(NESTED_INBUILD_PHASE)
	/* Built lazily, on first demand: the project's own materials nest first,
	then a copy of the shared search list */
	if (LinkedLists::len(proj->search_list) == 0) {
		ADD_TO_LINKED_LIST(proj->materials_nest, inbuild_nest, proj->search_list);
		inbuild_nest *N;
		linked_list *L = Supervisor::shared_nest_list();
		LOOP_OVER_LINKED_LIST(N, inbuild_nest, L)
			ADD_TO_LINKED_LIST(N, inbuild_nest, proj->search_list);
	}
	return proj->search_list;
}
|
|
|
|
|
2023-02-03 01:16:53 +02:00
|
|
|
/* If the language of play comes from an extension, add that extension's
materials nest to the project's search list.
NOTE(review): only |language_of_play| is consulted here, even though callers in
//Projects::set_languages// also invoke this after setting the languages of
syntax and index — confirm that asymmetry is intended. */
void Projects::add_language_extension_nest(inform_project *proj) {
	inform_language *lang = proj->language_of_play;
	if (lang == NULL) return;
	if (lang->belongs_to == NULL) return;
	inbuild_nest *N = Extensions::materials_nest(lang->belongs_to);
	if (N) ADD_TO_LINKED_LIST(N, inbuild_nest, proj->search_list);
}
|
|
|
|
|
2022-12-08 01:28:26 +02:00
|
|
|
@ Since there are two ways projects can be stored:
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Retrieve the project metadata from a copy of either storage genre:
first try the bundle genre, then fall back on the single-file genre. */
inform_project *Projects::from_copy(inbuild_copy *C) {
	inform_project *result = ProjectBundleManager::from_copy(C);
	if (result) return result;
	return ProjectFileManager::from_copy(C);
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Files of source text.
|
|
|
|
A project can have multiple files of I7 source text, but more usually it
|
|
|
|
has a single, "primary", one.
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Record |F| as the project's primary source file; this must be called (if at
all) before //Projects::source// first builds the source vertex list. */
void Projects::set_primary_source(inform_project *proj, filename *F) {
	proj->primary_source = F;
}
|
|
|
|
|
2021-09-13 02:33:30 +03:00
|
|
|
/* The primary source file, or |NULL| if none was ever set. */
filename *Projects::get_primary_source(inform_project *proj) {
	filename *F = proj->primary_source;
	return F;
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The following constructs the list of "source vertices" -- vertices in the
|
|
|
|
build graph representing the source files -- on demand. The reason this isn't
|
|
|
|
done automatically when the |proj| is created is that we needed to give time
|
|
|
|
for someone to call //Projects::set_primary_source//, since that will affect
|
|
|
|
the outcome.
|
|
|
|
|
|
|
|
=
|
|
|
|
linked_list *Projects::source(inform_project *proj) {
	if (proj == NULL) return NULL;
	/* Built on demand, so that //Projects::set_primary_source// has had a
	chance to be called first */
	if (LinkedLists::len(proj->source_vertices) == 0)
		@<Try the source file set at the command line, if any was@>;
	if (LinkedLists::len(proj->source_vertices) == 0)
		@<Fall back on the traditional choice@>;
	return proj->source_vertices;
}

@<Try the source file set at the command line, if any was@> =
	if (proj->primary_source) {
		build_vertex *S = Graphs::file_vertex(proj->primary_source);
		S->source_source = I"your source text";
		ADD_TO_LINKED_LIST(S, build_vertex, proj->source_vertices);
	}

@ If a bundle is found, then by default the source text within it is called
|story.ni|. The |.ni| is an anachronism now, but at one time stood for
"natural Inform", the working title for Inform 7 in the early 2000s.

@<Fall back on the traditional choice@> =
	filename *F = proj->as_copy->location_if_file;
	if (proj->as_copy->location_if_path)
		F = Filenames::in(
			Pathnames::down(proj->as_copy->location_if_path, I"Source"),
			I"story.ni");
	build_vertex *S = Graphs::file_vertex(F);
	S->source_source = I"your source text";
	ADD_TO_LINKED_LIST(S, build_vertex, proj->source_vertices);
|
2022-11-02 12:28:30 +02:00
|
|
|
@ Further source files may become apparent when headings are read in the
|
|
|
|
source text we already have, and which refer to specific files in the |Source|
|
|
|
|
subdirectory of the materials directory; so those are added here. (This happens
|
|
|
|
safely before the full graph for the project is made, so they do appear in
|
|
|
|
that dependency graph.)
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Add a further source file, named by a heading in the source text already read,
to the project's source vertex list. The file is looked for in the |Source|
subdirectory of the materials directory; nothing happens for a stand-alone
(non-bundle) project, which has no such convention. */
void Projects::add_heading_source(inform_project *proj, text_stream *path) {
	if (proj->as_copy->location_if_path == NULL) return;
	pathname *source_dir =
		Pathnames::down(Projects::materials_path(proj), I"Source");
	if (source_dir == NULL) return;
	build_vertex *V = Graphs::file_vertex(Filenames::in(source_dir, path));
	V->source_source = Str::duplicate(path);
	ADD_TO_LINKED_LIST(V, build_vertex, proj->source_vertices);
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The //inform7// compiler sometimes wants to know whether a particular
|
|
|
|
source file belongs to the project or not, so:
|
|
|
|
|
|
|
|
=
|
|
|
|
/* Does the project draw on the given source file? Used by //inform7// to tell
project source apart from, e.g., extension source. */
int Projects::draws_from_source_file(inform_project *proj, source_file *sf) {
	if (proj == NULL) return FALSE;
	linked_list *sources = Projects::source(proj);
	if (sources == NULL) return FALSE;
	build_vertex *V;
	LOOP_OVER_LINKED_LIST(V, build_vertex, sources)
		if (V->as_source_file == sf)
			return TRUE;
	return FALSE;
}
|
|
|
|
|
2022-06-24 02:58:19 +03:00
|
|
|
@h Version.
|
|
|
|
|
|
|
|
=
|
|
|
|
/* The project's semantic version number, or the null version if |proj| is |NULL|. */
semantic_version_number Projects::get_version(inform_project *proj) {
	if (proj) return proj->version;
	return VersionNumbers::null();
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h The project's languages.
|
|
|
|
Inform's ability to work outside of English is limited, at present, but for
|
2020-05-05 01:34:55 +03:00
|
|
|
the sake of future improvements we want to distinguish three uses of natural
|
|
|
|
language. In principle, a project could use different languages for each of
|
|
|
|
these.
|
|
|
|
|
|
|
|
First, the "language of play" is the one in which dialogue is printed and parsed
|
|
|
|
at run-time.
|
|
|
|
|
|
|
|
=
|
2020-02-14 02:06:28 +02:00
|
|
|
/* The language in which dialogue is printed and parsed at run-time. */
inform_language *Projects::get_language_of_play(inform_project *proj) {
	return (proj)?(proj->language_of_play):NULL;
}
|
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
@ Second, the "language of index" is the one in which the Index of a project is
|
|
|
|
written.
|
|
|
|
|
|
|
|
=
|
2020-02-14 02:06:28 +02:00
|
|
|
/* Set the language in which the Index is written; |proj| must not be |NULL|. */
void Projects::set_language_of_index(inform_project *proj, inform_language *L) {
	if (proj == NULL) internal_error("no project");
	proj->language_of_index = L;
}
|
|
|
|
/* The language in which the Index is written, or |NULL|. */
inform_language *Projects::get_language_of_index(inform_project *proj) {
	return (proj)?(proj->language_of_index):NULL;
}
|
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
@ Third, the "language of syntax" is the one in which the source text of a
|
|
|
|
project is written. For the Basic Inform extension, for example, it is English.
|
|
|
|
|
|
|
|
=
|
2020-02-14 02:06:28 +02:00
|
|
|
/* The language in which the project's source text is written, or |NULL|. */
inform_language *Projects::get_language_of_syntax(inform_project *proj) {
	return (proj)?(proj->language_of_syntax):NULL;
}
|
|
|
|
|
2022-06-23 19:37:16 +03:00
|
|
|
@ And this is where the languages of play and syntax are set, using metadata
|
|
|
|
previously extracted by //Projects::scan_bibliographic_data//. Note that they
|
|
|
|
are set only once, and can't be changed after that.
|
2020-05-05 23:59:02 +03:00
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::set_languages(inform_project *proj) {
	if (proj == NULL) internal_error("no project");

	/* (1) Language of syntax: must be found, and must support being written in */
	text_stream *name = proj->name_of_language_of_syntax;
	inform_language *L = Languages::find_for(name, Projects::nest_list(proj));
	if (L) {
		if (Languages::supports(L, WRITTEN_LSUPPORT)) {
			proj->language_of_syntax = L;
			Projects::add_language_extension_nest(proj);
		} else {
			TEMPORARY_TEXT(err)
			WRITE_TO(err,
				"this project asks to be 'written in' a language which does not support that");
			Copies::attach_error(proj->as_copy,
				CopyErrors::new_T(LANGUAGE_DEFICIENT_CE, -1, err));
			DISCARD_TEXT(err)
		}
	} else {
		/* Not found: record an unfulfilled requirement in the build graph */
		build_vertex *RV = Graphs::req_vertex(
			Requirements::any_version_of(Works::new(language_genre, name, I"")));
		Graphs::need_this_to_build(proj->as_copy->vertex, RV);
	}

	/* (2) Language of play: must be found, and must support being played in */
	name = proj->name_of_language_of_play;
	L = Languages::find_for(name, Projects::nest_list(proj));
	if (L) {
		if (Languages::supports(L, PLAYED_LSUPPORT)) {
			proj->language_of_play = L;
			Projects::add_language_extension_nest(proj);
		} else {
			TEMPORARY_TEXT(err)
			WRITE_TO(err,
				"this project asks to be 'played in' a language which does not support that");
			Copies::attach_error(proj->as_copy,
				CopyErrors::new_T(LANGUAGE_DEFICIENT_CE, -1, err));
			DISCARD_TEXT(err)
		}
	} else {
		build_vertex *RV = Graphs::req_vertex(
			Requirements::any_version_of(Works::new(language_genre, name, I"")));
		Graphs::need_this_to_build(proj->as_copy->vertex, RV);
	}

	/* (3) Language of index: defaults to the language of syntax if unnamed */
	if (Str::len(proj->name_of_language_of_index) == 0)
		proj->language_of_index = proj->language_of_syntax;
	else {
		name = proj->name_of_language_of_index;
		L = Languages::find_for(name, Projects::nest_list(proj));
		if (L) {
			if (Languages::supports(L, INDEXED_LSUPPORT)) {
				proj->language_of_index = L;
				Projects::add_language_extension_nest(proj);
			} else {
				TEMPORARY_TEXT(err)
				WRITE_TO(err,
					"this project asks to be 'indexed in' a language which does not support that");
				Copies::attach_error(proj->as_copy,
					CopyErrors::new_T(LANGUAGE_DEFICIENT_CE, -1, err));
				DISCARD_TEXT(err)
			}
		} else {
			build_vertex *RV = Graphs::req_vertex(
				Requirements::any_version_of(Works::new(language_genre, name, I"")));
			Graphs::need_this_to_build(proj->as_copy->vertex, RV);
		}
	}
}
|
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
@h Miscellaneous metadata.
|
2020-05-05 23:59:02 +03:00
|
|
|
The following function transfers some of the command-line options into settings
|
|
|
|
for a specific project.
|
|
|
|
|
2020-05-05 01:34:55 +03:00
|
|
|
A project marked "fix RNG" will be compiled with the random-number generator
|
|
|
|
initially set to the seed value at run-time. (This sounds like work too junior
|
|
|
|
for a build manager to do, but it's controlled by a command-line switch,
|
|
|
|
and that means it's not beneath our notice.)
|
|
|
|
|
|
|
|
=
|
2020-05-05 23:59:02 +03:00
|
|
|
/* Transfer command-line options into per-project settings: |r| for release
compilation, |co| for compile-only, |rng| a fixed RNG seed (0 meaning none).
As a side effect, the project's three natural languages are then resolved. */
void Projects::set_compilation_options(inform_project *proj, int r, int co, int rng) {
	proj->fix_rng = rng;
	proj->compile_only = co;
	proj->compile_for_release = r;
	Projects::set_languages(proj);
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
/* Is the project being compiled for release? |FALSE| for a |NULL| project. */
int Projects::currently_releasing(inform_project *proj) {
	return (proj)?(proj->compile_for_release):FALSE;
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Kit dependencies.
|
|
|
|
It is a practical impossibility to compile a story file without at least one
|
|
|
|
kit of pre-compiled Inter to merge into it, so all projects will depend on
|
|
|
|
at least one kit, and probably several.
|
2020-05-05 01:34:55 +03:00
|
|
|
|
|
|
|
=
|
|
|
|
typedef struct kit_dependency {
	struct inform_kit *kit; /* the kit depended upon */
	struct inform_language *because_of_language; /* if the dependency arose from a language; else |NULL| */
	struct inform_kit *because_of_kit; /* if it arose from another kit; else |NULL| */
	CLASS_DEFINITION
} kit_dependency;
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ =
|
2023-01-31 00:15:25 +02:00
|
|
|
int Projects::add_kit_dependency(inform_project *project, text_stream *kit_name,
	inform_language *because_of_language, inform_kit *because_of_kit,
	inbuild_requirement *req, linked_list *nests) {
	/* Idempotent: a kit already depended upon is not added twice */
	if (Projects::uses_kit(project, kit_name)) return TRUE;
	if (nests == NULL) nests = Projects::nest_list(project);
	inform_kit *K = Kits::find_by_name(kit_name, nests, req);
	if (K) {
		kit_dependency *kd = CREATE(kit_dependency);
		kd->kit = K;
		kd->because_of_language = because_of_language;
		kd->because_of_kit = because_of_kit;
		ADD_TO_LINKED_LIST(kd, kit_dependency, project->kits_to_include);
		return TRUE;
	} else {
		/* Kit not found: record the unfulfilled requirement in the build graph */
		build_vertex *RV = Graphs::req_vertex(
			Requirements::any_version_of(Works::new_raw(kit_genre, kit_name, I"")));
		Graphs::need_this_to_build(project->as_copy->vertex, RV);
		LOG("Required but could not find kit %S\n", kit_name);
		return FALSE;
	}
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ This can also be used to test on the fly:
|
|
|
|
|
|
|
|
=
|
2020-02-12 01:20:14 +02:00
|
|
|
/* Does the project already have a dependency on the kit with this title? */
int Projects::uses_kit(inform_project *project, text_stream *name) {
	kit_dependency *dep;
	LOOP_OVER_LINKED_LIST(dep, kit_dependency, project->kits_to_include) {
		text_stream *title = dep->kit->as_copy->edition->work->title;
		if (Str::eq(title, name)) return TRUE;
	}
	return FALSE;
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ Here's where we decide which kits are included.
|
|
|
|
|
|
|
|
=
|
2022-06-28 00:25:28 +03:00
|
|
|
int forcible_basic_mode = FALSE; /* once set, never cleared: basic mode is forced for the rest of the run */

/* Force "basic" (non-interactive-fiction) compilation mode, suppressing the
automatic dependency on CommandParserKit. */
void Projects::enter_forcible_basic_mode(void) {
	forcible_basic_mode = TRUE;
}
|
|
|
|
|
2020-02-12 01:20:14 +02:00
|
|
|
/* Decide the final set of kits the project will include, in priority order. */
void Projects::finalise_kit_dependencies(inform_project *project) {
	@<Add dependencies for the standard kits@>;
	/* if-this-then-that rules run in two passes, positive-sense then negative */
	int parity = TRUE; @<Perform if-this-then-that@>;
	parity = FALSE; @<Perform if-this-then-that@>;
	@<Sort the kit dependency list into priority order@>;
	@<Log what the dependencies actually were@>;
	@<Police forcible basic mode@>;
}
|
|
|
|
|
2022-06-28 00:25:28 +03:00
|
|
|
@ Note that //CommandParserKit//, if depended upon, will cause a further dependency
on //WorldModelKit//, through the if-this-then-that mechanism.
|
|
|
|
|
|
|
|
@<Add dependencies for the standard kits@> =
	/* First, the kits explicitly asked for in the project's JSON metadata */
	int no_word_from_JSON = TRUE;
	JSON_value *need;
	LOOP_OVER_LINKED_LIST(need, JSON_value, project->kit_names_to_include) {
		JSON_value *need_title = JSON::look_up_object(need, I"title");
		inbuild_work *work = Works::new_raw(kit_genre, need_title->if_string, I"");
		JSON_value *need_version = JSON::look_up_object(need, I"version");
		inbuild_requirement *req;
		if (need_version)
			req = Requirements::new(work,
				VersionNumberRanges::compatibility_range(
					VersionNumbers::from_text(need_version->if_string)));
		else
			req = Requirements::any_version_of(work);
		Projects::add_kit_dependency(project, need_title->if_string, NULL, NULL, req, NULL);
	}
	if (LinkedLists::len(project->kits_to_include) > 0) no_word_from_JSON = FALSE;
	/* BasicInformKit and the architecture kit are always included */
	Projects::add_kit_dependency(project, I"BasicInformKit", NULL, NULL, NULL, NULL);

	if (TargetVMs::is_16_bit(Supervisor::current_vm()))
		Projects::add_kit_dependency(project, I"Architecture16Kit", NULL, NULL, NULL, NULL);
	else
		Projects::add_kit_dependency(project, I"Architecture32Kit", NULL, NULL, NULL, NULL);

	/* The language of play may bring in further kits of its own */
	inform_language *L = project->language_of_play;
	if (L) {
		Languages::add_kit_dependencies_to_project(L, project);
	} else {
		Copies::attach_error(project->as_copy,
			CopyErrors::new_T(LANGUAGE_UNAVAILABLE_CE, -1,
				project->name_of_language_of_play));
	}
	/* CommandParserKit is the default only when the JSON said nothing and
	basic mode is not being forced */
	if ((no_word_from_JSON) && (forcible_basic_mode == FALSE))
		Projects::add_kit_dependency(project, I"CommandParserKit", NULL, NULL, NULL, NULL);
|
2020-02-12 01:20:14 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ We perform this first with |parity| being |TRUE|, then |FALSE|.
|
|
|
|
|
|
|
|
@<Perform if-this-then-that@> =
	/* Iterate to a fixed point: each rule firing may enable further rules */
	int changes_made = TRUE;
	while (changes_made) {
		changes_made = FALSE;
		kit_dependency *kd;
		LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
			if (Kits::perform_ittt(kd->kit, project, parity))
				changes_made = TRUE;
	}
|
|
|
|
|
|
|
|
@ Lower-priority kits are merged into the Inter tree before higher ones,
|
|
|
|
because of the following sort:
|
2020-02-12 01:20:14 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<Sort the kit dependency list into priority order@> =
	/* A stable bucket sort on priority, ascending.
	NOTE(review): a kit with priority >= 100 (or negative) would be silently
	dropped here — presumably priorities are constrained elsewhere; confirm */
	linked_list *sorted = NEW_LINKED_LIST(kit_dependency);
	for (int p=0; p<100; p++) {
		kit_dependency *kd;
		LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
			if (kd->kit->priority == p)
				ADD_TO_LINKED_LIST(kd, kit_dependency, sorted);
	}
	project->kits_to_include = sorted;
|
2020-03-09 14:44:59 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<Log what the dependencies actually were@> =
	kit_dependency *kd;
	LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
		LOG("Using Inform kit '%S' (priority %d).\n",
			kd->kit->as_copy->edition->work->title, kd->kit->priority);
|
2020-02-12 01:20:14 +02:00
|
|
|
|
2022-06-28 00:25:28 +03:00
|
|
|
@<Police forcible basic mode@> =
	/* If basic mode was forced, the final kit set must not include any of the
	interactive-fiction kits; if it does, the metadata was inconsistent */
	if (forcible_basic_mode) {
		int basic = TRUE;
		kit_dependency *kd;
		LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
			if ((Str::eq(kd->kit->as_copy->edition->work->title, I"CommandParserKit")) ||
				(Str::eq(kd->kit->as_copy->edition->work->title, I"WorldModelKit")) ||
				(Str::eq(kd->kit->as_copy->edition->work->title, I"DialogueKit")))
				basic = FALSE;
		if (basic == FALSE) {
			TEMPORARY_TEXT(err)
			WRITE_TO(err,
				"the project_metadata.json file shows this cannot be built in basic mode");
			Copies::attach_error(project->as_copy,
				CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
			DISCARD_TEXT(err)
		}
	}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Things to do with kits.
|
|
|
|
First up: these internal configuration files set up what "text" and "real number"
|
|
|
|
mean, for example, and are optionally included in kits. The following
|
|
|
|
reads them in for every kit which is included in the project.
|
2020-02-12 01:20:14 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
=
|
2020-02-12 01:20:14 +02:00
|
|
|
#ifdef CORE_MODULE
|
2020-08-01 13:35:56 +03:00
|
|
|
void Projects::load_built_in_kind_constructors(inform_project *project) {
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
2020-08-01 13:35:56 +03:00
|
|
|
Kits::load_built_in_kind_constructors(kd->kit);
|
2020-02-12 01:20:14 +02:00
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ Next, language element activation: this too is decided by kits.
|
2020-04-01 22:43:13 +03:00
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::activate_elements(inform_project *project) {
|
2022-09-01 02:14:18 +03:00
|
|
|
Features::activate_bare_minimum();
|
|
|
|
element_activation *EA;
|
|
|
|
LOOP_OVER_LINKED_LIST(EA, element_activation, project->activations) {
|
|
|
|
compiler_feature *P = Features::from_name(EA->element_name);
|
|
|
|
if (P == NULL) {
|
|
|
|
TEMPORARY_TEXT(err)
|
|
|
|
WRITE_TO(err, "project metadata refers to unknown compiler feature '%S'", EA->element_name);
|
|
|
|
Copies::attach_error(project->as_copy, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
} else {
|
|
|
|
if (EA->activate) Features::activate(P);
|
|
|
|
else if (Features::deactivate(P) == FALSE) {
|
|
|
|
TEMPORARY_TEXT(err)
|
|
|
|
WRITE_TO(err, "project metadata asks to deactivate mandatory compiler feature '%S'",
|
|
|
|
EA->element_name);
|
|
|
|
Copies::attach_error(project->as_copy, CopyErrors::new_T(METADATA_MALFORMED_CE, -1, err));
|
|
|
|
DISCARD_TEXT(err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
2020-04-01 22:43:13 +03:00
|
|
|
Kits::activate_elements(kd->kit);
|
2022-12-11 01:50:28 +02:00
|
|
|
LOG("Included by the end of the kit stage: "); Features::list(DL, TRUE, NULL);
|
|
|
|
LOG("\n");
|
|
|
|
}
|
|
|
|
|
|
|
|
void Projects::activate_extension_elements(inform_project *project) {
|
2022-12-08 01:28:26 +02:00
|
|
|
inform_extension *ext;
|
|
|
|
LOOP_OVER_LINKED_LIST(ext, inform_extension, project->extensions_included)
|
2022-12-11 01:50:28 +02:00
|
|
|
Extensions::activate_elements(ext, project);
|
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
|
|
|
Kits::activate_elements(kd->kit);
|
2022-12-08 01:28:26 +02:00
|
|
|
|
2022-12-11 01:50:28 +02:00
|
|
|
LOG("Included by the end of the extension stage: "); Features::list(DL, TRUE, NULL);
|
2021-06-10 00:20:23 +03:00
|
|
|
LOG("\n");
|
2022-09-01 02:14:18 +03:00
|
|
|
LOG("Excluded: "); Features::list(DL, FALSE, NULL);
|
2021-06-10 00:20:23 +03:00
|
|
|
LOG("\n");
|
2020-02-12 01:20:14 +02:00
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ And so is the question of whether the compiler is expected to compile a
|
|
|
|
|Main| function, or whether one has already been included in one of the kits.
|
|
|
|
|
|
|
|
=
|
2020-02-12 01:20:14 +02:00
|
|
|
int Projects::Main_defined(inform_project *project) {
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
|
|
|
if (kd->kit->defines_Main)
|
2020-02-12 01:20:14 +02:00
|
|
|
return TRUE;
|
|
|
|
return FALSE;
|
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The "index structure" is a kind of layout specification for the project
|
|
|
|
Index. Last kit wins:
|
|
|
|
|
|
|
|
=
|
2020-04-01 22:43:13 +03:00
|
|
|
text_stream *Projects::index_structure(inform_project *project) {
|
2020-02-12 01:20:14 +02:00
|
|
|
text_stream *I = NULL;
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
2020-04-01 22:43:13 +03:00
|
|
|
if (kd->kit->index_structure)
|
|
|
|
I = kd->kit->index_structure;
|
2020-02-12 01:20:14 +02:00
|
|
|
return I;
|
|
|
|
}
|
|
|
|
|
2023-05-02 21:07:44 +03:00
|
|
|
@ We can find a kit as used by a project:
|
|
|
|
|
|
|
|
=
|
|
|
|
inform_kit *Projects::get_linked_kit(inform_project *project, text_stream *name) {
|
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include) {
|
|
|
|
inform_kit *kit = kd->kit;
|
|
|
|
if (Str::eq_insensitive(kit->as_copy->edition->work->title, name))
|
|
|
|
return kit;
|
|
|
|
}
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
@ And find an exhaustive collection:
|
|
|
|
|
|
|
|
=
|
|
|
|
linked_list *Projects::list_of_kit_configurations(inform_project *project) {
|
|
|
|
linked_list *L = NEW_LINKED_LIST(kit_configuration);
|
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include) {
|
|
|
|
inform_kit *kit = kd->kit;
|
|
|
|
kit_configuration *kc;
|
|
|
|
LOOP_OVER_LINKED_LIST(kc, kit_configuration, kit->configurations)
|
|
|
|
ADD_TO_LINKED_LIST(kc, kit_configuration, L);
|
|
|
|
}
|
|
|
|
return L;
|
|
|
|
}
|
|
|
|
|
2020-02-12 01:20:14 +02:00
|
|
|
@ Every source text read into Inform is automatically prefixed by a few words
|
|
|
|
loading the fundamental "extensions" -- text such as "Include Basic Inform by
|
|
|
|
Graham Nelson." If Inform were a computer, this would be the BIOS which boots
|
|
|
|
up its operating system. Each kit can contribute such extensions, so there
|
|
|
|
may be multiple sentences, which we need to count up.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::early_source_text(OUTPUT_STREAM, inform_project *project) {
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include)
|
|
|
|
Kits::early_source_text(OUT, kd->kit);
|
2020-02-12 01:20:14 +02:00
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The following is for passing requests to //inter//, which does not contain
|
|
|
|
//supervisor//, and so doesn't use the data structure //inform_kit//. That
|
|
|
|
means we can't give it a list of kits: we have to give it a list of their
|
|
|
|
details instead.
|
|
|
|
|
|
|
|
=
|
2021-08-10 13:01:03 +03:00
|
|
|
#ifdef PIPELINE_MODULE
|
2021-11-16 00:44:29 +02:00
|
|
|
linked_list *Projects::list_of_attachment_instructions(inform_project *project) {
|
|
|
|
linked_list *requirements_list = NEW_LINKED_LIST(attachment_instruction);
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, project->kits_to_include) {
|
|
|
|
inform_kit *K = kd->kit;
|
2022-01-02 23:29:43 +02:00
|
|
|
attachment_instruction *link = LoadBinaryKitsStage::new_requirement(
|
2020-02-12 01:20:14 +02:00
|
|
|
K->as_copy->location_if_path, K->attachment_point);
|
2021-11-16 00:44:29 +02:00
|
|
|
ADD_TO_LINKED_LIST(link, attachment_instruction, requirements_list);
|
2020-02-12 01:20:14 +02:00
|
|
|
}
|
|
|
|
return requirements_list;
|
2020-02-11 02:15:49 +02:00
|
|
|
}
|
2020-02-12 01:20:14 +02:00
|
|
|
#endif
|
2020-02-13 01:48:37 +02:00
|
|
|
|
2021-09-12 15:21:13 +03:00
|
|
|
@h File to write to.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::set_primary_output(inform_project *proj, filename *F) {
|
|
|
|
proj->primary_output = F;
|
|
|
|
}
|
|
|
|
|
|
|
|
filename *Projects::get_primary_output(inform_project *proj) {
|
|
|
|
if (proj->primary_output) return proj->primary_output;
|
|
|
|
if (proj->stand_alone) {
|
|
|
|
return Filenames::set_extension(proj->primary_source,
|
|
|
|
TargetVMs::get_transpiled_extension(
|
|
|
|
Supervisor::current_vm()));
|
|
|
|
} else {
|
|
|
|
pathname *build_folder = Projects::build_path(proj);
|
|
|
|
return Filenames::in(build_folder, I"auto.inf");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-17 21:49:46 +03:00
|
|
|
@h Detecting dialogue.
|
|
|
|
There's an awkward timing issue with detecting dialogue in the source text.
|
|
|
|
The rule is that an Inform project should depend on DialogueKit if it contains
|
|
|
|
content under a dialogue section, but not otherwise. That in turn activates
|
|
|
|
the "dialogue" compiler feature. On the other hand, the source text also has
|
|
|
|
material placed under headings which are for use with dialogue only. So we
|
|
|
|
can't read the entire source text first and then decide: we have to switch
|
|
|
|
on the dialogue feature the moment any dialogue matter is found. This is
|
|
|
|
done by having the //syntax// module call the following:
|
|
|
|
|
|
|
|
=
|
|
|
|
inform_project *project_being_scanned = NULL;
|
|
|
|
void Projects::dialogue_present(void) {
|
|
|
|
if (project_being_scanned) {
|
2022-12-11 01:50:28 +02:00
|
|
|
Projects::add_kit_dependency(project_being_scanned, I"DialogueKit", NULL, NULL, NULL, NULL);
|
2022-09-17 21:49:46 +03:00
|
|
|
Projects::activate_elements(project_being_scanned);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h The full graph.
|
|
|
|
This can be quite grandiose even though most of it will never come to anything,
|
|
|
|
rather like a family tree for a minor European royal family.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::construct_graph(inform_project *proj) {
|
|
|
|
if (proj == NULL) return;
|
|
|
|
if (proj->chosen_build_target == NULL) {
|
|
|
|
Projects::finalise_kit_dependencies(proj);
|
2022-09-17 21:49:46 +03:00
|
|
|
project_being_scanned = proj;
|
2023-07-23 17:28:37 +03:00
|
|
|
Copies::get_source_text(proj->as_copy, I"graphing project");
|
2022-09-17 21:49:46 +03:00
|
|
|
project_being_scanned = NULL;
|
2020-05-05 23:59:02 +03:00
|
|
|
build_vertex *V = proj->as_copy->vertex;
|
|
|
|
@<Construct the graph upstream of V@>;
|
|
|
|
@<Construct the graph downstream of V@>;
|
2020-05-06 01:52:20 +03:00
|
|
|
Projects::check_extension_versions(proj);
|
2020-05-05 23:59:02 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@ So the structure here is a simple chain of dependencies, but note that
|
|
|
|
they are upstream of the project's vertex |V|, not downstream:
|
|
|
|
= (text)
|
|
|
|
Blorb package --> Story file --> I6 file --> Inter in memory --> Project
|
|
|
|
inblorb inform6 inter (in inform7) inform7
|
|
|
|
=
|
|
|
|
When looking at pictures like this, we must remember that time runs opposite
|
|
|
|
to the arrows: that is, these are built from right to left. For example,
|
|
|
|
the story file is made before the blorb package is made. The make algorithm
|
|
|
|
builds this list in a depth-first way, rapidly running downstream as it
|
|
|
|
discovers things it must do, then slowly clawing back upstream, actually
|
|
|
|
performing those tasks. In the diagram, below each arrow from |A --> B| is
|
|
|
|
the tool needed to make |A| from |B|.
|
|
|
|
|
|
|
|
So where should it start? Not at |V|, the vertex representing the project
|
|
|
|
itself, but somewhere upstream. The code below looks at the project's
|
|
|
|
compilation settings and sets |proj->chosen_build_target| to this start
|
|
|
|
position. In a simple //inform7// usage, we'll have:
|
|
|
|
= (text)
|
|
|
|
Blorb package --> Story file --> I6 file --> Inter in memory --> Project
|
|
|
|
^
|
|
|
|
chosen target
|
|
|
|
=
|
|
|
|
so that we have a two-stage process: (i) generate inter code in memory,
|
|
|
|
and (ii) code-generate the I6 source code file from that. But in a more
|
|
|
|
elaborate use of //inblorb// to incrementally build a project, it will be:
|
|
|
|
= (text)
|
|
|
|
Blorb package --> Story file --> I6 file --> Inter in memory --> Project
|
|
|
|
^
|
|
|
|
chosen target
|
|
|
|
=
|
|
|
|
if we are releasing a bare story file, or
|
|
|
|
= (text)
|
|
|
|
Blorb package --> Story file --> I6 file --> Inter in memory --> Project
|
|
|
|
^
|
|
|
|
chosen target
|
|
|
|
=
|
|
|
|
for a release of a blorbed one.
|
|
|
|
|
|
|
|
@<Construct the graph upstream of V@> =
|
|
|
|
target_vm *VM = Supervisor::current_vm();
|
2021-09-12 15:21:13 +03:00
|
|
|
filename *inf_F = Projects::get_primary_output(proj);
|
2020-02-22 01:16:23 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
/* vertex for the inter code put together in memory */
|
2020-03-10 02:08:35 +02:00
|
|
|
build_vertex *inter_V = Graphs::file_vertex(inf_F);
|
2020-05-05 23:59:02 +03:00
|
|
|
Graphs::need_this_to_build(inter_V, V);
|
2020-02-24 11:48:40 +02:00
|
|
|
BuildSteps::attach(inter_V, compile_using_inform7_skill,
|
2020-05-05 23:59:02 +03:00
|
|
|
proj->compile_for_release, VM, NULL, proj->as_copy);
|
2020-02-24 11:48:40 +02:00
|
|
|
|
2021-09-12 15:21:13 +03:00
|
|
|
/* vertex for the final code file code-generated from that */
|
2020-02-22 01:16:23 +02:00
|
|
|
build_vertex *inf_V = Graphs::file_vertex(inf_F);
|
2021-09-12 15:21:13 +03:00
|
|
|
inf_V->always_build_dependencies = TRUE;
|
2020-02-24 11:48:40 +02:00
|
|
|
Graphs::need_this_to_build(inf_V, inter_V);
|
|
|
|
BuildSteps::attach(inf_V, code_generate_using_inter_skill,
|
2020-05-05 23:59:02 +03:00
|
|
|
proj->compile_for_release, VM, NULL, proj->as_copy);
|
2020-02-22 01:16:23 +02:00
|
|
|
|
2021-09-12 15:21:13 +03:00
|
|
|
if (Str::eq(TargetVMs::family(VM), I"Inform6")) {
|
|
|
|
pathname *build_folder = Projects::build_path(proj);
|
|
|
|
|
|
|
|
TEMPORARY_TEXT(story_file_leafname)
|
|
|
|
WRITE_TO(story_file_leafname, "output.%S", TargetVMs::get_unblorbed_extension(VM));
|
|
|
|
filename *unblorbed_F = Filenames::in(build_folder, story_file_leafname);
|
|
|
|
DISCARD_TEXT(story_file_leafname)
|
|
|
|
proj->unblorbed_vertex = Graphs::file_vertex(unblorbed_F);
|
|
|
|
Graphs::need_this_to_build(proj->unblorbed_vertex, inf_V);
|
|
|
|
BuildSteps::attach(proj->unblorbed_vertex, compile_using_inform6_skill,
|
|
|
|
proj->compile_for_release, VM, NULL, proj->as_copy);
|
|
|
|
|
|
|
|
TEMPORARY_TEXT(story_file_leafname2)
|
|
|
|
WRITE_TO(story_file_leafname2, "output.%S", TargetVMs::get_blorbed_extension(VM));
|
|
|
|
filename *blorbed_F = Filenames::in(build_folder, story_file_leafname2);
|
|
|
|
DISCARD_TEXT(story_file_leafname2)
|
|
|
|
proj->blorbed_vertex = Graphs::file_vertex(blorbed_F);
|
|
|
|
proj->blorbed_vertex->always_build_this = TRUE;
|
|
|
|
Graphs::need_this_to_build(proj->blorbed_vertex, proj->unblorbed_vertex);
|
|
|
|
BuildSteps::attach(proj->blorbed_vertex, package_using_inblorb_skill,
|
|
|
|
proj->compile_for_release, VM, NULL, proj->as_copy);
|
|
|
|
|
|
|
|
if (proj->compile_only) {
|
|
|
|
proj->chosen_build_target = inf_V;
|
|
|
|
inf_V->always_build_this = TRUE;
|
|
|
|
} else if (proj->compile_for_release) proj->chosen_build_target = proj->blorbed_vertex;
|
|
|
|
else proj->chosen_build_target = proj->unblorbed_vertex;
|
|
|
|
} else {
|
2020-05-05 23:59:02 +03:00
|
|
|
proj->chosen_build_target = inf_V;
|
2020-03-30 02:30:20 +03:00
|
|
|
inf_V->always_build_this = TRUE;
|
2021-09-12 15:21:13 +03:00
|
|
|
}
|
2020-05-05 23:59:02 +03:00
|
|
|
|
|
|
|
@ The graph also extends downstream of |V|, representing the things we will
|
|
|
|
need before we can run //inform7// on the project: and this is not a linear
|
|
|
|
run of arrows at all, but fans considerably outwards -- to its languages,
|
|
|
|
kits and extensions, and then to their dependencies in turn.
|
|
|
|
|
|
|
|
Note that the following does not create dependencies for extensions used by
|
|
|
|
the project: that's because //Copies::get_source_text// has already done so.
|
|
|
|
|
|
|
|
@<Construct the graph downstream of V@> =
|
|
|
|
@<The project depends on its source text@>;
|
|
|
|
@<The project depends on the kits it includes@>;
|
|
|
|
@<The project depends on the languages it is written in@>;
|
|
|
|
|
|
|
|
@<The project depends on its source text@> =
|
|
|
|
build_vertex *S;
|
|
|
|
LOOP_OVER_LINKED_LIST(S, build_vertex, Projects::source(proj))
|
|
|
|
Graphs::need_this_to_build(V, S);
|
|
|
|
|
|
|
|
@<The project depends on the kits it includes@> =
|
|
|
|
kit_dependency *kd;
|
|
|
|
LOOP_OVER_LINKED_LIST(kd, kit_dependency, proj->kits_to_include)
|
|
|
|
if ((kd->because_of_kit == NULL) && (kd->because_of_language == NULL))
|
|
|
|
Projects::graph_dependent_kit(proj, V, kd, FALSE);
|
2020-02-22 01:16:23 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<The project depends on the languages it is written in@> =
|
|
|
|
inform_language *L = proj->language_of_play;
|
|
|
|
if (L) Projects::graph_dependent_language(proj, V, L, FALSE);
|
|
|
|
L = proj->language_of_syntax;
|
|
|
|
if (L) Projects::graph_dependent_language(proj, V, L, FALSE);
|
|
|
|
L = proj->language_of_index;
|
|
|
|
if (L) Projects::graph_dependent_language(proj, V, L, FALSE);
|
|
|
|
|
|
|
|
@ The point of these two functions is that if A uses B which uses C then we
|
|
|
|
want the dependencies |A -> B -> C| rather than |A -> B| together with |A -> C|.
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::graph_dependent_kit(inform_project *proj,
|
|
|
|
build_vertex *V, kit_dependency *kd, int use) {
|
2020-03-09 14:44:59 +02:00
|
|
|
build_vertex *KV = kd->kit->as_copy->vertex;
|
|
|
|
if (use) Graphs::need_this_to_use(V, KV);
|
|
|
|
else Graphs::need_this_to_build(V, KV);
|
2022-04-05 14:14:27 +03:00
|
|
|
inbuild_requirement *req;
|
|
|
|
LOOP_OVER_LINKED_LIST(req, inbuild_requirement, kd->kit->extensions)
|
|
|
|
Kits::add_extension_dependency(KV, req);
|
2020-03-09 14:44:59 +02:00
|
|
|
kit_dependency *kd2;
|
2020-05-05 23:59:02 +03:00
|
|
|
LOOP_OVER_LINKED_LIST(kd2, kit_dependency, proj->kits_to_include)
|
2020-03-09 14:44:59 +02:00
|
|
|
if ((kd2->because_of_kit == kd->kit) && (kd2->because_of_language == NULL))
|
2020-05-05 23:59:02 +03:00
|
|
|
Projects::graph_dependent_kit(proj, KV, kd2, TRUE);
|
2020-03-09 14:44:59 +02:00
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
void Projects::graph_dependent_language(inform_project *proj,
|
|
|
|
build_vertex *V, inform_language *L, int use) {
|
2020-03-09 14:44:59 +02:00
|
|
|
build_vertex *LV = L->as_copy->vertex;
|
|
|
|
if (use) Graphs::need_this_to_use(V, LV);
|
|
|
|
else Graphs::need_this_to_build(V, LV);
|
|
|
|
kit_dependency *kd2;
|
2020-05-05 23:59:02 +03:00
|
|
|
LOOP_OVER_LINKED_LIST(kd2, kit_dependency, proj->kits_to_include)
|
2020-03-09 14:44:59 +02:00
|
|
|
if ((kd2->because_of_kit == NULL) && (kd2->because_of_language == L))
|
2020-05-05 23:59:02 +03:00
|
|
|
Projects::graph_dependent_kit(proj, LV, kd2, TRUE);
|
|
|
|
}
|
|
|
|
|
2020-05-06 01:52:20 +03:00
|
|
|
@ One last task. It's unlikely, but possible, that an extension has been
|
|
|
|
included in a project twice, for different reasons, but that the two
|
|
|
|
inclusions have requirements about the extension's version which can't
|
|
|
|
both be met. Therefore we run through the downstream vertices and check
|
|
|
|
each extension against the intersection of all requirements put on it:
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::check_extension_versions(inform_project *proj) {
|
|
|
|
Projects::check_extension_versions_d(proj, proj->as_copy->vertex);
|
|
|
|
}
|
|
|
|
|
|
|
|
void Projects::check_extension_versions_d(inform_project *proj, build_vertex *V) {
|
|
|
|
if ((V->as_copy) && (V->as_copy->edition->work->genre == extension_genre)) {
|
2022-12-08 01:28:26 +02:00
|
|
|
inform_extension *E = Extensions::from_copy(V->as_copy);
|
2020-05-06 01:52:20 +03:00
|
|
|
if (Extensions::satisfies(E) == FALSE) {
|
|
|
|
copy_error *CE = CopyErrors::new_T(SYNTAX_CE, ExtVersionTooLow_SYNERROR,
|
|
|
|
I"two incompatible versions");
|
|
|
|
CopyErrors::supply_node(CE, Extensions::get_inclusion_sentence(E));
|
|
|
|
Copies::attach_error(proj->as_copy, CE);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
build_vertex *W;
|
|
|
|
LOOP_OVER_LINKED_LIST(W, build_vertex, V->build_edges)
|
|
|
|
Projects::check_extension_versions_d(proj, W);
|
|
|
|
LOOP_OVER_LINKED_LIST(W, build_vertex, V->use_edges)
|
|
|
|
Projects::check_extension_versions_d(proj, W);
|
|
|
|
}
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@h Reading the source text.
|
|
|
|
We cannot know what extensions a project needs without reading its source
|
|
|
|
text, where the Include... sentences are, and of course we cannot parse the
|
|
|
|
source text to find those unless the Preform grammar is in place.
|
|
|
|
|
|
|
|
But then we can make a syntax tree for the project. The large-scale structure is:
|
|
|
|
= (text)
|
|
|
|
root
|
|
|
|
Implied inclusions (level 0 heading)
|
|
|
|
"Include Basic Inform by Graham Nelson."
|
|
|
|
...
|
|
|
|
Source text from file 1 (level 0 heading)
|
|
|
|
...
|
|
|
|
Source text from file 2 (level 0 heading)
|
|
|
|
...
|
|
|
|
...
|
|
|
|
Invented sentences (level 0 heading)
|
|
|
|
"The colour understood is a colour that varies."
|
|
|
|
=
|
|
|
|
Once this is made, any Include... sentences are expanded into syntax trees
|
|
|
|
for the extensions they refer to, in a post-processing phase.
|
2020-03-09 14:44:59 +02:00
|
|
|
|
2020-05-19 18:36:50 +03:00
|
|
|
For a real-world example of the result, see //inform7: Performance Metrics//.
|
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
=
|
|
|
|
void Projects::read_source_text_for(inform_project *proj) {
|
2022-06-23 19:37:16 +03:00
|
|
|
inform_language *E = Languages::find_for(I"English", Projects::nest_list(proj));
|
|
|
|
if (E == NULL) return;
|
|
|
|
Languages::read_Preform_definition(E, proj->search_list);
|
|
|
|
if ((proj->language_of_syntax) && (proj->language_of_syntax != E)) {
|
|
|
|
if (Languages::read_Preform_definition(
|
|
|
|
proj->language_of_syntax, proj->search_list) == FALSE) {
|
|
|
|
copy_error *CE = CopyErrors::new_T(SYNTAX_CE, UnavailableLOS_SYNERROR,
|
|
|
|
proj->language_of_syntax->as_copy->edition->work->title);
|
|
|
|
Copies::attach_error(proj->as_copy, CE);
|
|
|
|
}
|
|
|
|
}
|
2020-05-06 17:31:53 +03:00
|
|
|
Sentences::set_start_of_source(sfsm, -1);
|
2020-02-17 00:01:50 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
parse_node *inclusions_heading, *implicit_heading;
|
|
|
|
@<First an implied super-heading for implied inclusions and the Options@>;
|
|
|
|
@<Then the syntax tree from the actual source text@>;
|
|
|
|
@<Lastly an implied heading for any inventions by the compiler@>;
|
|
|
|
@<Post-process the syntax tree@>;
|
2020-02-17 00:01:50 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
#ifndef CORE_MODULE
|
|
|
|
Copies::list_attached_errors(STDERR, proj->as_copy);
|
|
|
|
#endif
|
|
|
|
}
|
2020-03-04 21:34:23 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ Under the "Implied inclusions" heading come sentences to include the
|
|
|
|
extensions required by kits but not explicitly asked for in source text,
|
|
|
|
like Basic Inform or Standard Rules; and also any sentences in the
|
|
|
|
|Options.txt| file, if the user has one.
|
2020-03-07 10:46:43 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<First an implied super-heading for implied inclusions and the Options@> =
|
2020-05-11 17:21:29 +03:00
|
|
|
inclusions_heading = Node::new(HEADING_NT);
|
|
|
|
Node::set_text(inclusions_heading,
|
2023-09-05 10:36:51 +03:00
|
|
|
Feeds::feed_C_string_expanding_strings(U"Implied inclusions"));
|
2020-05-11 17:21:29 +03:00
|
|
|
SyntaxTree::graft_sentence(proj->syntax_tree, inclusions_heading);
|
2022-11-02 01:56:24 +02:00
|
|
|
Headings::place_implied_level_0(proj->syntax_tree, inclusions_heading, proj->as_copy);
|
2020-03-07 10:46:43 +02:00
|
|
|
|
2020-03-31 02:17:21 +03:00
|
|
|
int wc = lexer_wordcount;
|
2020-06-28 01:18:54 +03:00
|
|
|
TEMPORARY_TEXT(early)
|
2020-05-05 23:59:02 +03:00
|
|
|
Projects::early_source_text(early, proj);
|
2020-05-12 12:00:53 +03:00
|
|
|
if (Str::len(early) > 0) Feeds::feed_text(early);
|
2020-06-28 01:18:54 +03:00
|
|
|
DISCARD_TEXT(early)
|
2020-05-05 23:59:02 +03:00
|
|
|
inbuild_nest *ext = Supervisor::external();
|
|
|
|
if (ext) OptionsFile::read(
|
|
|
|
Filenames::in(ext->location, I"Options.txt"));
|
2020-03-07 10:46:43 +02:00
|
|
|
wording early_W = Wordings::new(wc, lexer_wordcount-1);
|
|
|
|
|
2020-05-11 17:21:29 +03:00
|
|
|
int l = SyntaxTree::push_bud(proj->syntax_tree, inclusions_heading);
|
2020-05-05 23:59:02 +03:00
|
|
|
Sentences::break_into_project_copy(proj->syntax_tree, early_W, proj->as_copy, proj);
|
2020-05-11 17:21:29 +03:00
|
|
|
SyntaxTree::pop_bud(proj->syntax_tree, l);
|
2020-03-09 14:44:59 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ We don't need to make an implied heading here, because the sentence-breaker
|
|
|
|
in the //syntax// module does that automatically whenever it detects source
|
|
|
|
text originating in a different file; which, of course, will now happen, since
|
|
|
|
up to now the source text hasn't come from a file at all.
|
2020-03-04 21:34:23 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
The "start of source" is the word number of the first word of the first
|
|
|
|
source text file for the project, and we notify the sentence-breaker when
|
|
|
|
it comes.
|
2020-03-04 21:34:23 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<Then the syntax tree from the actual source text@> =
|
|
|
|
int wc = lexer_wordcount;
|
|
|
|
int start_set = FALSE;
|
|
|
|
linked_list *L = Projects::source(proj);
|
|
|
|
build_vertex *N;
|
|
|
|
LOOP_OVER_LINKED_LIST(N, build_vertex, L) {
|
|
|
|
filename *F = N->as_file;
|
|
|
|
if (start_set == FALSE) {
|
|
|
|
start_set = TRUE;
|
2020-05-06 17:31:53 +03:00
|
|
|
Sentences::set_start_of_source(sfsm, lexer_wordcount);
|
2020-03-04 21:34:23 +02:00
|
|
|
}
|
2020-05-05 23:59:02 +03:00
|
|
|
N->as_source_file =
|
2023-07-13 02:23:12 +03:00
|
|
|
SourceText::read_file(proj->as_copy, F, N->source_source, TRUE);
|
2023-06-02 00:24:00 +03:00
|
|
|
SVEXPLAIN(1, "(from %f)\n", F);
|
2020-03-04 21:34:23 +02:00
|
|
|
}
|
2020-05-11 17:21:29 +03:00
|
|
|
int l = SyntaxTree::push_bud(proj->syntax_tree, proj->syntax_tree->root_node);
|
2020-05-05 23:59:02 +03:00
|
|
|
Sentences::break_into_project_copy(
|
|
|
|
proj->syntax_tree, Wordings::new(wc, lexer_wordcount-1), proj->as_copy, proj);
|
2020-05-11 17:21:29 +03:00
|
|
|
SyntaxTree::pop_bud(proj->syntax_tree, l);
|
2020-05-05 23:59:02 +03:00
|
|
|
|
|
|
|
@ Inventions are when the //inform7// compiler makes up extra sentences, not
|
|
|
|
in the source text as such. They all go under the following implied heading.
|
|
|
|
Note that we leave the tree with its attachment point under this heading,
|
|
|
|
ready for those inventions (if in fact there are any).
|
|
|
|
|
|
|
|
@<Lastly an implied heading for any inventions by the compiler@> =
|
2020-05-11 17:21:29 +03:00
|
|
|
int l = SyntaxTree::push_bud(proj->syntax_tree, proj->syntax_tree->root_node);
|
|
|
|
implicit_heading = Node::new(HEADING_NT);
|
|
|
|
Node::set_text(implicit_heading,
|
2023-09-05 10:36:51 +03:00
|
|
|
Feeds::feed_C_string_expanding_strings(U"Invented sentences"));
|
2020-05-11 17:21:29 +03:00
|
|
|
SyntaxTree::graft_sentence(proj->syntax_tree, implicit_heading);
|
2022-11-02 01:56:24 +02:00
|
|
|
Headings::place_implied_level_0(proj->syntax_tree, implicit_heading, proj->as_copy);
|
2020-05-11 17:21:29 +03:00
|
|
|
SyntaxTree::pop_bud(proj->syntax_tree, l);
|
|
|
|
SyntaxTree::push_bud(proj->syntax_tree, implicit_heading); /* never popped */
|
2020-03-03 13:02:46 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@ The ordering here is, as so often in this section of code, important. We
|
|
|
|
have to know which language elements are in use before we can safely look
|
|
|
|
for Include... sentences, because some of those sentences are conditional
|
|
|
|
on that. We have to perform the tree surgery asked for by Include... in
|
|
|
|
place of... instructions after the sweep for inclusions.
|
2020-03-03 13:02:46 +02:00
|
|
|
|
2020-05-05 23:59:02 +03:00
|
|
|
@<Post-process the syntax tree@> =
|
|
|
|
Projects::activate_elements(proj);
|
|
|
|
Inclusions::traverse(proj->as_copy, proj->syntax_tree);
|
2020-05-07 18:44:07 +03:00
|
|
|
Headings::satisfy_dependencies(proj, proj->syntax_tree, proj->as_copy);
|
2022-12-11 01:50:28 +02:00
|
|
|
Projects::activate_extension_elements(proj);
|
2022-06-23 19:37:16 +03:00
|
|
|
|
|
|
|
@h The bibliographic sentence.
|
|
|
|
It might seem sensible to parse the opening sentence of the source text,
|
|
|
|
the bibliographic sentence giving title and author, by looking at the result
|
|
|
|
of sentence-breaking: in other words, to wait until the syntax tree for a
|
|
|
|
project has been read in.
|
|
|
|
|
|
|
|
But this isn't fast enough, because the sentence also specifies the language
|
|
|
|
of syntax, and we need to know of any non-English choice immediately. We
|
|
|
|
don't even want to use Preform to parse the sentence, either, because we might
|
|
|
|
want to load a different Preform file depending on that non-English choice.
|
|
|
|
|
|
|
|
So the following rapid scan catches just the first sentence of the first
|
|
|
|
source file of the project.
|
|
|
|
|
|
|
|
@e BadTitleSentence_SYNERROR
|
|
|
|
|
|
|
|
=
|
|
|
|
void Projects::scan_bibliographic_data(inform_project *proj) {
|
|
|
|
linked_list *L = Projects::source(proj);
|
|
|
|
build_vertex *N;
|
|
|
|
LOOP_OVER_LINKED_LIST(N, build_vertex, L) {
|
|
|
|
filename *F = N->as_file;
|
|
|
|
FILE *SF = Filenames::fopen_caseless(F, "r");
|
|
|
|
if (SF == NULL) break; /* no source means no bibliographic data */
|
|
|
|
@<Read the opening sentence@>;
|
|
|
|
fclose(SF);
|
|
|
|
break; /* so that we look only at the first source file */
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@<Read the opening sentence@> =
|
|
|
|
TEMPORARY_TEXT(bibliographic_sentence)
|
|
|
|
TEMPORARY_TEXT(bracketed)
|
|
|
|
@<Capture the opening sentence and its bracketed part@>;
|
|
|
|
if ((Str::len(bibliographic_sentence) > 0) &&
|
|
|
|
(Str::get_first_char(bibliographic_sentence) == '"'))
|
|
|
|
@<The opening sentence is bibliographic, so scan it@>;
|
|
|
|
DISCARD_TEXT(bibliographic_sentence)
|
|
|
|
DISCARD_TEXT(bracketed)
|
|
|
|
|
|
|
|
@ A bibliographic sentence can optionally give a language, by use of "(in ...)":
|
|
|
|
|
|
|
|
>> "Bonjour Albertine" by Marcel Proust (in French)
|
|
|
|
|
|
|
|
If so, the following writes |"Bonjour Albertine" by Marcel Proust| to the
|
|
|
|
text |bibliographic_sentence| and |in French| to the text |bracketed|. If not,
|
|
|
|
the whole thing goes into |bibliographic_sentence| and |bracketed| is empty.
|
|
|
|
|
|
|
|
@<Capture the opening sentence and its bracketed part@> =
|
2023-09-06 15:19:32 +03:00
|
|
|
inchar32_t c;
|
|
|
|
int commented = FALSE, quoted = FALSE, rounded = FALSE, content_found = FALSE;
|
|
|
|
while ((c = TextFiles::utf8_fgetc(SF, NULL, NULL)) != CH32EOF) {
|
2022-06-23 19:37:16 +03:00
|
|
|
if (c == 0xFEFF) continue; /* skip the optional Unicode BOM pseudo-character */
|
|
|
|
if (commented) {
|
|
|
|
if (c == ']') commented = FALSE;
|
|
|
|
} else {
|
|
|
|
if (quoted) {
|
2023-09-06 15:19:32 +03:00
|
|
|
if (rounded) PUT_TO(bracketed, c);
|
|
|
|
else PUT_TO(bibliographic_sentence, c);
|
2022-06-23 19:37:16 +03:00
|
|
|
if (c == '"') quoted = FALSE;
|
|
|
|
} else {
|
|
|
|
if (c == '[') commented = TRUE;
|
|
|
|
else {
|
2023-09-06 15:19:32 +03:00
|
|
|
if (Characters::is_whitespace(c) == FALSE) content_found = TRUE;
|
2022-06-23 19:37:16 +03:00
|
|
|
if (rounded) {
|
|
|
|
if (c == '"') quoted = TRUE;
|
|
|
|
if ((c == '\x0a') || (c == '\x0d') || (c == '\n')) c = ' ';
|
|
|
|
if (c == ')') rounded = FALSE;
|
2023-09-06 15:19:32 +03:00
|
|
|
else PUT_TO(bracketed, c);
|
2022-06-23 19:37:16 +03:00
|
|
|
} else {
|
|
|
|
if (c == '(') rounded = TRUE;
|
|
|
|
else {
|
|
|
|
if ((c == '\x0a') || (c == '\x0d') || (c == '\n')) {
|
|
|
|
if (content_found) break;
|
|
|
|
c = ' ';
|
2023-09-06 15:19:32 +03:00
|
|
|
PUT_TO(bibliographic_sentence, c);
|
2022-06-23 19:37:16 +03:00
|
|
|
} else {
|
2023-09-06 15:19:32 +03:00
|
|
|
PUT_TO(bibliographic_sentence, c);
|
2022-06-23 19:37:16 +03:00
|
|
|
}
|
|
|
|
if (c == '"') quoted = TRUE;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Str::trim_white_space(bibliographic_sentence);
|
|
|
|
Str::trim_white_space(bracketed);
|
|
|
|
if (Str::get_last_char(bibliographic_sentence) == '.')
|
|
|
|
Str::delete_last_character(bibliographic_sentence);
|
|
|
|
|
|
|
|
@ The author is sometimes given outside of quotation marks:
|
|
|
|
|
|
|
|
>> "The Large Scale Structure of Space-Time" by Lindsay Lohan
|
|
|
|
|
|
|
|
But not always:
|
|
|
|
|
|
|
|
>> "Greek Rural Postmen and Their Cancellation Numbers" by "will.i.am"
|
|
|
|
|
|
|
|
@<The opening sentence is bibliographic, so scan it@> =
	match_results mr = Regexp::create_mr();
	/* Try the most specific pattern first: both title and author quoted. */
	if (Regexp::match(&mr, bibliographic_sentence, U"\"([^\"]+)\" by \"([^\"]+)\"")) {
		text_stream *title = mr.exp[0];
		text_stream *author = mr.exp[1];
		@<Set title and author@>;
	} else if (Regexp::match(&mr, bibliographic_sentence, U"\"([^\"]+)\" by ([^\"]+)")) {
		/* Quoted title, unquoted author. */
		text_stream *title = mr.exp[0];
		text_stream *author = mr.exp[1];
		@<Set title and author@>;
	} else if (Regexp::match(&mr, bibliographic_sentence, U"\"([^\"]+)\"")) {
		/* Title alone: the author is left unset. */
		text_stream *title = mr.exp[0];
		text_stream *author = NULL;
		@<Set title and author@>;
	} else {
		@<Flag bad bibliographic sentence@>;
	}
	Regexp::dispose_of(&mr);
	/* Any round-bracketed matter is a comma-separated list of language
	   clauses, e.g. "played in French": peel off one clause per comma,
	   then parse whatever remains after the last comma. */
	if (Str::len(bracketed) > 0) {
		int okay = TRUE;
		match_results mr2 = Regexp::create_mr();
		while (Regexp::match(&mr2, bracketed, U"(%c+?),(%c+)")) {
			okay = (okay && (Projects::parse_language_clauses(proj, mr2.exp[0])));
			bracketed = Str::duplicate(mr2.exp[1]);
		}
		okay = (okay && (Projects::parse_language_clauses(proj, bracketed)));
		if (okay == FALSE) @<Flag bad bibliographic sentence@>;
		Regexp::dispose_of(&mr2);
	}
|
|
|
|
|
|
|
|
@<Set title and author@> =
	/* Overwrite the work's title only when a nonempty one was matched. */
	if (Str::len(title) > 0) {
		text_stream *T = proj->as_copy->edition->work->title;
		Str::clear(T);
		WRITE_TO(T, "%S", title);
	}
	/* Likewise for the author, creating the stream if none existed yet. */
	if (Str::len(author) > 0) {
		if (proj->as_copy->edition->work->author_name == NULL)
			proj->as_copy->edition->work->author_name = Str::new();
		text_stream *A = proj->as_copy->edition->work->author_name;
		Str::clear(A);
		WRITE_TO(A, "%S", author);
	}
|
|
|
|
|
|
|
|
@<Flag bad bibliographic sentence@> =
	/* Attach a syntax copy error to the project rather than halting:
	   the caller decides how (and whether) to report it. */
	copy_error *CE = CopyErrors::new(SYNTAX_CE, BadTitleSentence_SYNERROR);
	Copies::attach_error(proj->as_copy, CE);
|
2023-02-05 01:30:25 +02:00
|
|
|
|
|
|
|
@
|
|
|
|
|
|
|
|
=
|
|
|
|
int Projects::parse_language_clauses(inform_project *proj, text_stream *clause) {
|
|
|
|
int verdict = FALSE;
|
|
|
|
match_results mr = Regexp::create_mr();
|
2023-09-05 10:36:51 +03:00
|
|
|
if (Regexp::match(&mr, clause, U"(%c+?) in (%c+)")) {
|
2023-02-05 01:30:25 +02:00
|
|
|
text_stream *what = mr.exp[0];
|
|
|
|
text_stream *language_name = mr.exp[1];
|
|
|
|
verdict = Projects::parse_language_clause(proj, what, language_name);
|
2023-09-05 10:36:51 +03:00
|
|
|
} else if (Regexp::match(&mr, clause, U" *in (%c+)")) {
|
2023-02-05 01:30:25 +02:00
|
|
|
text_stream *what = I"played";
|
|
|
|
text_stream *language_name = mr.exp[0];
|
|
|
|
verdict = Projects::parse_language_clause(proj, what, language_name);
|
2023-09-05 10:36:51 +03:00
|
|
|
} else if (Regexp::match(&mr, clause, U" *")) {
|
2023-02-05 01:30:25 +02:00
|
|
|
verdict = TRUE;
|
|
|
|
}
|
|
|
|
Regexp::dispose_of(&mr);
|
|
|
|
return verdict;
|
|
|
|
}
|
|
|
|
|
|
|
|
int Projects::parse_language_clause(inform_project *proj, text_stream *what, text_stream *language_name) {
	match_results mr = Regexp::create_mr();
	int verdict = FALSE;
	/* |what| may be a list of verbs: split on ", and", ", " and " and ",
	   recursing on both halves, so that e.g. "played, written and indexed"
	   distributes the same language name to all three roles. */
	if (Regexp::match(&mr, what, U"(%c+?), and (%c+)")) {
		verdict = ((Projects::parse_language_clause(proj, mr.exp[0], language_name)) &&
			(Projects::parse_language_clause(proj, mr.exp[1], language_name)));
	} else if (Regexp::match(&mr, what, U"(%c+?), (%c+)")) {
		verdict = ((Projects::parse_language_clause(proj, mr.exp[0], language_name)) &&
			(Projects::parse_language_clause(proj, mr.exp[1], language_name)));
	} else if (Regexp::match(&mr, what, U"(%c+?) and (%c+)")) {
		verdict = ((Projects::parse_language_clause(proj, mr.exp[0], language_name)) &&
			(Projects::parse_language_clause(proj, mr.exp[1], language_name)));
	} else {
		/* A single verb: record the language name for the matching role.
		   Anything unrecognised leaves the verdict FALSE. */
		if (Regexp::match(&mr, what, U" *written *")) @<Set language of syntax@>
		else if (Regexp::match(&mr, what, U" *played *")) @<Set language of play@>
		else if (Regexp::match(&mr, what, U" *indexed *")) @<Set language of index@>
	}
	Regexp::dispose_of(&mr);
	return verdict;
}
|
|
|
|
|
|
|
|
@<Set language of play@> =
	/* Record only the name for now; it is resolved to an actual
	   |inform_language| later on. */
	proj->name_of_language_of_play = Str::duplicate(language_name);
	Str::trim_white_space(proj->name_of_language_of_play);
	verdict = TRUE;
|
|
|
|
|
|
|
|
@<Set language of syntax@> =
	/* Record only the name for now; it is resolved to an actual
	   |inform_language| later on. */
	proj->name_of_language_of_syntax = Str::duplicate(language_name);
	Str::trim_white_space(proj->name_of_language_of_syntax);
	verdict = TRUE;
|
|
|
|
|
|
|
|
@<Set language of index@> =
	/* Record only the name for now; it is resolved to an actual
	   |inform_language| later on. */
	proj->name_of_language_of_index = Str::duplicate(language_name);
	Str::trim_white_space(proj->name_of_language_of_index);
	verdict = TRUE;
|
2023-07-13 02:23:12 +03:00
|
|
|
|
|
|
|
@h Performing the census.
For some reason a census often makes a good story (cf. Luke 2:1-5), but here
there's disappointingly little to tell, because the work is all done by a
single call to //Nests::search_for//.

What we return is "a list of all extensions normally visible to the project",
which means, those built in to Inform, and those installed in its materials
directory.
=
|
|
|
|
linked_list *Projects::perform_census(inform_project *proj) {
	if (proj == NULL) internal_error("no project");

	/* The nests to search: the materials nest first, then the internal one.
	   (Fixed here: the two NEW_LINKED_LIST type tags were swapped — this list
	   holds |inbuild_nest| entries, as the ADD_TO_LINKED_LIST calls show,
	   while the census below is filled with |inbuild_search_result|s.) */
	linked_list *search_list = NEW_LINKED_LIST(inbuild_nest);
	if (Projects::materials_nest(proj))
		ADD_TO_LINKED_LIST(Projects::materials_nest(proj), inbuild_nest, search_list);
	if (Supervisor::internal())
		ADD_TO_LINKED_LIST(Supervisor::internal(), inbuild_nest, search_list);

	/* The census itself: every copy of the extension genre found in those nests. */
	linked_list *census = NEW_LINKED_LIST(inbuild_search_result);
	inbuild_requirement *req = Requirements::anything_of_genre(extension_genre);
	Nests::search_for(req, search_list, census);
	return census;
}
|