release 2025-11-15
This commit is contained in:
commit
6c0af4520e
10
.claude/settings.local.json
Normal file
10
.claude/settings.local.json
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(find:*)",
|
||||
"Bash(for file in crawlerla_*)"
|
||||
],
|
||||
"deny": [],
|
||||
"ask": []
|
||||
}
|
||||
}
|
||||
370
.editorconfig
Normal file
370
.editorconfig
Normal file
@ -0,0 +1,370 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
|
||||
charset = utf-8
|
||||
|
||||
# Indentation and spacing
|
||||
indent_size = 4
|
||||
indent_style = space
|
||||
tab_width = 4
|
||||
|
||||
# New line preferences
|
||||
#end_of_line = crlf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
max_line_length = 120
|
||||
|
||||
#### .NET Coding Conventions ####
|
||||
|
||||
# Organize usings
|
||||
#dotnet_separate_import_directive_groups = false
|
||||
#dotnet_sort_system_directives_first = true
|
||||
|
||||
# this. and Me. preferences
|
||||
dotnet_style_qualification_for_event = false:suggestion
|
||||
dotnet_style_qualification_for_field = false:suggestion
|
||||
dotnet_style_qualification_for_method = false:suggestion
|
||||
dotnet_style_qualification_for_property = false:suggestion
|
||||
|
||||
# Language keywords vs BCL types preferences
|
||||
dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
|
||||
dotnet_style_predefined_type_for_member_access = true:suggestion
|
||||
|
||||
# Parentheses preferences
|
||||
dotnet_style_parentheses_in_arithmetic_binary_operators = never_if_unnecessary:suggestion
|
||||
dotnet_style_parentheses_in_other_binary_operators = never_if_unnecessary:suggestion
|
||||
dotnet_style_parentheses_in_relational_binary_operators = never_if_unnecessary:suggestion
|
||||
|
||||
# Modifier preferences
|
||||
dotnet_style_require_accessibility_modifiers = for_non_interface_members:suggestion
|
||||
|
||||
# Expression-level preferences
|
||||
#dotnet_style_coalesce_expression = true:suggestion
|
||||
#dotnet_style_collection_initializer = true:suggestion
|
||||
#dotnet_style_explicit_tuple_names = true:suggestion
|
||||
#dotnet_style_null_propagation = true:suggestion
|
||||
#dotnet_style_object_initializer = true:suggestion
|
||||
#dotnet_style_prefer_auto_properties = true:silent
|
||||
#dotnet_style_prefer_compound_assignment = true:suggestion
|
||||
#dotnet_style_prefer_conditional_expression_over_assignment = true:silent
|
||||
#dotnet_style_prefer_conditional_expression_over_return = true:silent
|
||||
#dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
|
||||
#dotnet_style_prefer_inferred_tuple_names = true:suggestion
|
||||
#dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion
|
||||
#dotnet_style_prefer_simplified_interpolation = true:suggestion
|
||||
|
||||
# Field preferences
|
||||
#dotnet_style_readonly_field = true:suggestion
|
||||
|
||||
# Parameter preferences
|
||||
#dotnet_code_quality_unused_parameters = all:suggestion
|
||||
|
||||
#### C# Coding Conventions ####
|
||||
|
||||
# var preferences
|
||||
csharp_style_var_elsewhere = true:suggestion
|
||||
csharp_style_var_for_built_in_types = true:suggestion
|
||||
csharp_style_var_when_type_is_apparent = true:suggestion
|
||||
|
||||
# Expression-bodied members
|
||||
csharp_style_expression_bodied_accessors = true:suggestion
|
||||
csharp_style_expression_bodied_constructors = false:suggestion
|
||||
#csharp_style_expression_bodied_indexers = true:silent
|
||||
#csharp_style_expression_bodied_lambdas = true:silent
|
||||
#csharp_style_expression_bodied_local_functions = false:silent
|
||||
csharp_style_expression_bodied_methods = false:suggestion
|
||||
#csharp_style_expression_bodied_operators = false:silent
|
||||
csharp_style_expression_bodied_properties = true:suggestion
|
||||
|
||||
# Pattern matching preferences
|
||||
#csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
|
||||
#csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion
|
||||
#csharp_style_prefer_switch_expression = true:suggestion
|
||||
|
||||
# Null-checking preferences
|
||||
#csharp_style_conditional_delegate_call = true:suggestion
|
||||
|
||||
# Modifier preferences
|
||||
#csharp_prefer_static_local_function = true:suggestion
|
||||
csharp_preferred_modifier_order = public, private, protected, internal, new, abstract, virtual, sealed, override, static, readonly, extern, unsafe, volatile, async:suggestion
|
||||
|
||||
# Code-block preferences
|
||||
#csharp_prefer_braces = true:silent
|
||||
#csharp_prefer_simple_using_statement = true:suggestion
|
||||
|
||||
# Expression-level preferences
|
||||
#csharp_prefer_simple_default_expression = true:suggestion
|
||||
#csharp_style_deconstructed_variable_declaration = true:suggestion
|
||||
#csharp_style_inlined_variable_declaration = true:suggestion
|
||||
#csharp_style_pattern_local_over_anonymous_function = true:suggestion
|
||||
#csharp_style_prefer_index_operator = true:suggestion
|
||||
#csharp_style_prefer_range_operator = true:suggestion
|
||||
#csharp_style_throw_expression = true:suggestion
|
||||
#csharp_style_unused_value_assignment_preference = discard_variable:suggestion
|
||||
#csharp_style_unused_value_expression_statement_preference = discard_variable:silent
|
||||
|
||||
# 'using' directive preferences
|
||||
csharp_using_directive_placement = outside_namespace:silent
|
||||
|
||||
#### C# Formatting Rules ####
|
||||
|
||||
# New line preferences
|
||||
csharp_new_line_before_catch = true
|
||||
csharp_new_line_before_else = true
|
||||
csharp_new_line_before_finally = true
|
||||
#csharp_new_line_before_members_in_anonymous_types = true
|
||||
csharp_new_line_before_members_in_object_initializers = false
|
||||
csharp_new_line_before_open_brace = all
|
||||
csharp_new_line_between_query_expression_clauses = true
|
||||
resharper_csharp_place_simple_embedded_statement_on_same_line = never
|
||||
resharper_csharp_keep_existing_embedded_arrangement = false
|
||||
|
||||
# Indentation preferences
|
||||
#csharp_indent_block_contents = true
|
||||
csharp_indent_braces = false
|
||||
#csharp_indent_case_contents = true
|
||||
#csharp_indent_case_contents_when_block = true
|
||||
#csharp_indent_labels = one_less_than_current
|
||||
csharp_indent_switch_labels = true
|
||||
xmldoc_indent_text = zeroindent
|
||||
|
||||
# Space preferences
|
||||
csharp_space_after_cast = false
|
||||
csharp_space_after_colon_in_inheritance_clause = true
|
||||
csharp_space_after_comma = true
|
||||
csharp_space_after_dot = false
|
||||
csharp_space_after_keywords_in_control_flow_statements = true
|
||||
csharp_space_after_semicolon_in_for_statement = true
|
||||
csharp_space_around_binary_operators = before_and_after
|
||||
#csharp_space_around_declaration_statements = false
|
||||
csharp_space_before_colon_in_inheritance_clause = true
|
||||
csharp_space_before_comma = false
|
||||
csharp_space_before_dot = false
|
||||
csharp_space_before_open_square_brackets = false
|
||||
csharp_space_before_semicolon_in_for_statement = false
|
||||
csharp_space_between_empty_square_brackets = false
|
||||
csharp_space_between_method_call_empty_parameter_list_parentheses = false
|
||||
csharp_space_between_method_call_name_and_opening_parenthesis = false
|
||||
csharp_space_between_method_call_parameter_list_parentheses = false
|
||||
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
|
||||
csharp_space_between_method_declaration_name_and_open_parenthesis = false
|
||||
csharp_space_between_method_declaration_parameter_list_parentheses = false
|
||||
csharp_space_between_parentheses = false
|
||||
csharp_space_between_square_brackets = false
|
||||
|
||||
# Wrapping preferences
|
||||
csharp_preserve_single_line_blocks = true
|
||||
#csharp_preserve_single_line_statements = true
|
||||
|
||||
#### Naming styles ####
|
||||
|
||||
# Naming rules
|
||||
|
||||
#dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion
|
||||
#dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
|
||||
#dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
|
||||
|
||||
#dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion
|
||||
#dotnet_naming_rule.types_should_be_pascal_case.symbols = types
|
||||
#dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
|
||||
|
||||
#dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion
|
||||
#dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
|
||||
#dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
|
||||
|
||||
# Symbol specifications
|
||||
|
||||
#dotnet_naming_symbols.interface.applicable_kinds = interface
|
||||
#dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||
#dotnet_naming_symbols.interface.required_modifiers =
|
||||
|
||||
#dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
|
||||
#dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||
#dotnet_naming_symbols.types.required_modifiers =
|
||||
|
||||
#dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
|
||||
#dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||
#dotnet_naming_symbols.non_field_members.required_modifiers =
|
||||
|
||||
# Naming styles
|
||||
|
||||
#dotnet_naming_style.pascal_case.required_prefix =
|
||||
#dotnet_naming_style.pascal_case.required_suffix =
|
||||
#dotnet_naming_style.pascal_case.word_separator =
|
||||
#dotnet_naming_style.pascal_case.capitalization = pascal_case
|
||||
|
||||
#dotnet_naming_style.begins_with_i.required_prefix = I
|
||||
#dotnet_naming_style.begins_with_i.required_suffix =
|
||||
#dotnet_naming_style.begins_with_i.word_separator =
|
||||
#dotnet_naming_style.begins_with_i.capitalization = pascal_case
|
||||
|
||||
dotnet_diagnostic.ide0055.severity = warning
|
||||
|
||||
dotnet_naming_rule.constants_rule.severity = warning
|
||||
dotnet_naming_rule.constants_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.constants_rule.symbols = constants_symbols
|
||||
|
||||
dotnet_naming_rule.event_rule.severity = warning
|
||||
dotnet_naming_rule.event_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.event_rule.symbols = event_symbols
|
||||
|
||||
dotnet_naming_rule.interfaces_rule.severity = warning
|
||||
dotnet_naming_rule.interfaces_rule.style = i_upper_camel_case_style
|
||||
dotnet_naming_rule.interfaces_rule.symbols = interfaces_symbols
|
||||
|
||||
dotnet_naming_rule.locals_rule.severity = warning
|
||||
dotnet_naming_rule.locals_rule.style = lower_camel_case_style_1
|
||||
dotnet_naming_rule.locals_rule.symbols = locals_symbols
|
||||
|
||||
dotnet_naming_rule.local_constants_rule.severity = warning
|
||||
dotnet_naming_rule.local_constants_rule.style = lower_camel_case_style_1
|
||||
dotnet_naming_rule.local_constants_rule.symbols = local_constants_symbols
|
||||
|
||||
dotnet_naming_rule.local_functions_rule.severity = warning
|
||||
dotnet_naming_rule.local_functions_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.local_functions_rule.symbols = local_functions_symbols
|
||||
|
||||
dotnet_naming_rule.method_rule.severity = warning
|
||||
dotnet_naming_rule.method_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.method_rule.symbols = method_symbols
|
||||
|
||||
dotnet_naming_rule.parameters_rule.severity = warning
|
||||
dotnet_naming_rule.parameters_rule.style = lower_camel_case_style_1
|
||||
dotnet_naming_rule.parameters_rule.symbols = parameters_symbols
|
||||
|
||||
dotnet_naming_rule.private_constants_rule.severity = warning
|
||||
dotnet_naming_rule.private_constants_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.private_constants_rule.symbols = private_constants_symbols
|
||||
|
||||
dotnet_naming_rule.private_instance_fields_rule.severity = warning
|
||||
dotnet_naming_rule.private_instance_fields_rule.style = lower_camel_case_style
|
||||
dotnet_naming_rule.private_instance_fields_rule.symbols = private_instance_fields_symbols
|
||||
|
||||
dotnet_naming_rule.private_static_fields_rule.severity = warning
|
||||
dotnet_naming_rule.private_static_fields_rule.style = lower_camel_case_style
|
||||
dotnet_naming_rule.private_static_fields_rule.symbols = private_static_fields_symbols
|
||||
|
||||
dotnet_naming_rule.private_static_readonly_rule.severity = warning
|
||||
dotnet_naming_rule.private_static_readonly_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.private_static_readonly_rule.symbols = private_static_readonly_symbols
|
||||
|
||||
dotnet_naming_rule.property_rule.severity = warning
|
||||
dotnet_naming_rule.property_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.property_rule.symbols = property_symbols
|
||||
|
||||
dotnet_naming_rule.public_fields_rule.severity = warning
|
||||
dotnet_naming_rule.public_fields_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.public_fields_rule.symbols = public_fields_symbols
|
||||
|
||||
dotnet_naming_rule.static_readonly_rule.severity = warning
|
||||
dotnet_naming_rule.static_readonly_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.static_readonly_rule.symbols = static_readonly_symbols
|
||||
|
||||
dotnet_naming_rule.types_and_namespaces_rule.severity = warning
|
||||
dotnet_naming_rule.types_and_namespaces_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.types_and_namespaces_rule.symbols = types_and_namespaces_symbols
|
||||
|
||||
# Frontier: ignore underscore-prefixed namespaces
|
||||
dotnet_naming_rule.namespaces_rule.severity = none
|
||||
dotnet_naming_rule.namespaces_rule.style = upper_camel_case_style
|
||||
dotnet_naming_rule.namespaces_rule.symbols = namespaces_symbols
|
||||
# End Frontier: ignore underscore-prefixed namespaces
|
||||
|
||||
dotnet_naming_rule.type_parameters_rule.severity = warning
|
||||
dotnet_naming_rule.type_parameters_rule.style = t_upper_camel_case_style
|
||||
dotnet_naming_rule.type_parameters_rule.symbols = type_parameters_symbols
|
||||
|
||||
dotnet_naming_style.i_upper_camel_case_style.capitalization = pascal_case
|
||||
dotnet_naming_style.i_upper_camel_case_style.required_prefix = I
|
||||
|
||||
dotnet_naming_style.lower_camel_case_style.capitalization = camel_case
|
||||
dotnet_naming_style.lower_camel_case_style.required_prefix = _
|
||||
dotnet_naming_style.lower_camel_case_style_1.capitalization = camel_case
|
||||
|
||||
dotnet_naming_style.t_upper_camel_case_style.capitalization = pascal_case
|
||||
dotnet_naming_style.t_upper_camel_case_style.required_prefix = T
|
||||
dotnet_naming_style.upper_camel_case_style.capitalization = pascal_case
|
||||
|
||||
dotnet_naming_symbols.constants_symbols.applicable_accessibilities = public, internal, protected, protected_internal, private_protected
|
||||
dotnet_naming_symbols.constants_symbols.applicable_kinds = field
|
||||
dotnet_naming_symbols.constants_symbols.required_modifiers = const
|
||||
|
||||
dotnet_naming_symbols.event_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.event_symbols.applicable_kinds = event
|
||||
|
||||
dotnet_naming_symbols.interfaces_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.interfaces_symbols.applicable_kinds = interface
|
||||
|
||||
dotnet_naming_symbols.locals_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.locals_symbols.applicable_kinds = local
|
||||
|
||||
dotnet_naming_symbols.local_constants_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.local_constants_symbols.applicable_kinds = local
|
||||
dotnet_naming_symbols.local_constants_symbols.required_modifiers = const
|
||||
|
||||
dotnet_naming_symbols.local_functions_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.local_functions_symbols.applicable_kinds = local_function
|
||||
|
||||
dotnet_naming_symbols.method_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.method_symbols.applicable_kinds = method
|
||||
|
||||
dotnet_naming_symbols.parameters_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.parameters_symbols.applicable_kinds = parameter
|
||||
|
||||
dotnet_naming_symbols.private_constants_symbols.applicable_accessibilities = private
|
||||
dotnet_naming_symbols.private_constants_symbols.applicable_kinds = field
|
||||
dotnet_naming_symbols.private_constants_symbols.required_modifiers = const
|
||||
|
||||
dotnet_naming_symbols.private_instance_fields_symbols.applicable_accessibilities = private
|
||||
dotnet_naming_symbols.private_instance_fields_symbols.applicable_kinds = field
|
||||
|
||||
dotnet_naming_symbols.private_static_fields_symbols.applicable_accessibilities = private
|
||||
dotnet_naming_symbols.private_static_fields_symbols.applicable_kinds = field
|
||||
dotnet_naming_symbols.private_static_fields_symbols.required_modifiers = static
|
||||
|
||||
dotnet_naming_symbols.private_static_readonly_symbols.applicable_accessibilities = private
|
||||
dotnet_naming_symbols.private_static_readonly_symbols.applicable_kinds = field
|
||||
dotnet_naming_symbols.private_static_readonly_symbols.required_modifiers = static, readonly
|
||||
|
||||
dotnet_naming_symbols.property_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.property_symbols.applicable_kinds = property
|
||||
|
||||
dotnet_naming_symbols.public_fields_symbols.applicable_accessibilities = public, internal, protected, protected_internal, private_protected
|
||||
dotnet_naming_symbols.public_fields_symbols.applicable_kinds = field
|
||||
|
||||
dotnet_naming_symbols.static_readonly_symbols.applicable_accessibilities = public, internal, protected, protected_internal, private_protected
|
||||
dotnet_naming_symbols.static_readonly_symbols.applicable_kinds = field
|
||||
dotnet_naming_symbols.static_readonly_symbols.required_modifiers = static, readonly
|
||||
|
||||
# Frontier: ignore underscore-prefixed namespaces
|
||||
dotnet_naming_symbols.types_and_namespaces_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.types_and_namespaces_symbols.applicable_kinds = class, struct, enum, delegate
|
||||
|
||||
dotnet_naming_symbols.namespace_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.namespace_symbols.applicable_kinds = namespace
|
||||
# End Frontier: ignore underscore-prefixed namespaces
|
||||
|
||||
dotnet_naming_symbols.type_parameters_symbols.applicable_accessibilities = *
|
||||
dotnet_naming_symbols.type_parameters_symbols.applicable_kinds = type_parameter
|
||||
|
||||
# ReSharper properties
|
||||
resharper_braces_for_ifelse = required_for_multiline
|
||||
resharper_csharp_wrap_arguments_style = chop_if_long
|
||||
resharper_csharp_wrap_parameters_style = chop_if_long
|
||||
resharper_keep_existing_attribute_arrangement = true
|
||||
resharper_wrap_chained_binary_patterns = chop_if_long
|
||||
resharper_wrap_chained_method_calls = chop_if_long
|
||||
resharper_csharp_trailing_comma_in_multiline_lists = true
|
||||
resharper_csharp_qualified_using_at_nested_scope = false
|
||||
resharper_csharp_prefer_qualified_reference = false
|
||||
resharper_csharp_allow_alias = false
|
||||
|
||||
[*.{csproj,xml,yml,yaml,dll.config,msbuildproj,targets,props,json}]
|
||||
indent_size = 2
|
||||
|
||||
[nuget.config]
|
||||
indent_size = 2
|
||||
|
||||
[{*.yaml,*.yml}]
|
||||
ij_yaml_indent_sequence_value = false
|
||||
5
.envrc
Normal file
5
.envrc
Normal file
@ -0,0 +1,5 @@
|
||||
set -e
|
||||
if ! has nix_direnv_version || ! nix_direnv_version 3.0.6; then
|
||||
source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.6/direnvrc" "sha256-RYcUJaRMf8oF5LznDrlCXbkOQrywm0HDv1VjYGaJGdM="
|
||||
fi
|
||||
use flake
|
||||
64
.gitattributes
vendored
Normal file
64
.gitattributes
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
###############################################################################
|
||||
# Set default behavior to automatically normalize line endings.
|
||||
###############################################################################
|
||||
* text=auto
|
||||
|
||||
###############################################################################
|
||||
# Set default behavior for command prompt diff.
|
||||
#
|
||||
# This is need for earlier builds of msysgit that does not have it on by
|
||||
# default for csharp files.
|
||||
# Note: This is only used by command line
|
||||
###############################################################################
|
||||
*.cs diff=csharp
|
||||
|
||||
###############################################################################
|
||||
# Set the merge driver for project and solution files
|
||||
#
|
||||
# Merging from the command prompt will add diff markers to the files if there
|
||||
# are conflicts (Merging from VS is not affected by the settings below, in VS
|
||||
# the diff markers are never inserted). Diff markers may cause the following
|
||||
# file extensions to fail to load in VS. An alternative would be to treat
|
||||
# these files as binary and thus will always conflict and require user
|
||||
# intervention with every merge. To do so, just uncomment the entries below
|
||||
###############################################################################
|
||||
#*.sln merge=binary
|
||||
#*.csproj merge=binary
|
||||
#*.vbproj merge=binary
|
||||
#*.vcxproj merge=binary
|
||||
#*.vcproj merge=binary
|
||||
#*.dbproj merge=binary
|
||||
#*.fsproj merge=binary
|
||||
#*.lsproj merge=binary
|
||||
#*.wixproj merge=binary
|
||||
#*.modelproj merge=binary
|
||||
#*.sqlproj merge=binary
|
||||
#*.wwaproj merge=binary
|
||||
Resources/Maps/**.yml merge=mapping-merge-driver
|
||||
|
||||
###############################################################################
|
||||
# behavior for image files
|
||||
#
|
||||
# image files are treated as binary by default.
|
||||
###############################################################################
|
||||
#*.jpg binary
|
||||
#*.png binary
|
||||
#*.gif binary
|
||||
|
||||
###############################################################################
|
||||
# diff behavior for common document formats
|
||||
#
|
||||
# Convert binary document formats to text before diffing them. This feature
|
||||
# is only available from the command line. Turn it on by uncommenting the
|
||||
# entries below.
|
||||
###############################################################################
|
||||
#*.doc diff=astextplain
|
||||
#*.DOC diff=astextplain
|
||||
#*.docx diff=astextplain
|
||||
#*.DOCX diff=astextplain
|
||||
#*.dot diff=astextplain
|
||||
#*.DOT diff=astextplain
|
||||
#*.pdf diff=astextplain
|
||||
#*.PDF diff=astextplain
|
||||
#*.rtf diff=astextplain
|
||||
#*.RTF diff=astextplain
|
||||
58
.github/CODEOWNERS
vendored
Normal file
58
.github/CODEOWNERS
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
# Last match in file takes precedence.
|
||||
|
||||
# Sorting by path instead of by who added it one day :(
|
||||
# this isn't how codeowners rules work pls read the first comment instead of trying to force a sorting order
|
||||
|
||||
#/Resources/ConfigPresets/WizardsDen/ @crazybrain23
|
||||
#/Content.*/Administration/ @DrSmugleaf @crazybrain23
|
||||
#/Resources/ServerInfo/ @crazybrain23
|
||||
#/Resources/ServerInfo/Guidebook/ServerRules/ @crazybrain23
|
||||
|
||||
#/Resources/Prototypes/Maps/** @Emisse @ArtisticRoomba
|
||||
|
||||
#/Resources/Prototypes/Body/ @DrSmugleaf # suffering
|
||||
#/Resources/Prototypes/Entities/Mobs/Player/ @DrSmugleaf
|
||||
#/Resources/Prototypes/Entities/Mobs/Species/ @DrSmugleaf
|
||||
#/Resources/Prototypes/Guidebook/rules.yml @crazybrain23
|
||||
#/Content.*/Body/ @DrSmugleaf
|
||||
#/Content.YAMLLinter @DrSmugleaf
|
||||
#/Content.Shared/Damage/ @DrSmugleaf
|
||||
|
||||
#/Content.*/Anomaly/ @TheShuEd
|
||||
#/Resources/Prototypes/Entities/Structures/Specific/anomalies.yml @TheShuEd
|
||||
|
||||
#/Content.*/Forensics/ @ficcialfaint
|
||||
|
||||
# SKREEEE
|
||||
#/Content.*.Database/ @PJB3005 @DrSmugleaf
|
||||
#/Content.Shared.Database/Log*.cs @PJB3005 @DrSmugleaf @crazybrain23
|
||||
#/Pow3r/ @PJB3005
|
||||
#/Content.Server/Power/Pow3r/ @PJB3005
|
||||
|
||||
|
||||
# Sloth
|
||||
#/Content.*/Audio @metalgearsloth
|
||||
#/Content.*/Movement @metalgearsloth
|
||||
#/Content.*/NPC @metalgearsloth
|
||||
#/Content.*/Shuttles @metalgearsloth
|
||||
#/Content.*/Weapons @metalgearsloth
|
||||
|
||||
## Frontier:
|
||||
# Workflows & repo config
|
||||
/.github/ @Cheackraze
|
||||
|
||||
# Database
|
||||
/Content.*.Database/ @Cheackraze
|
||||
|
||||
# Config files
|
||||
/Resources/ConfigPresets/_NF/ @Cheackraze
|
||||
/Content.Shared/CCVar/ @Cheackraze
|
||||
/Content.Shared/*/CCVar/ @Cheackraze
|
||||
/Content.Shared/*/CCVars/ @Cheackraze
|
||||
|
||||
# Mapchecker
|
||||
#/.github/mapchecker/ @whatston3
|
||||
#/.github/workflows/frontier-mapchecker.yml @whatston3
|
||||
|
||||
# All Frontier Maps
|
||||
/Resources/Maps/_NF/ @arimah @MagnusCrowe
|
||||
7
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
7
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
contact_links:
|
||||
- name: Report a Security Vulnerability
|
||||
url: https://github.com/space-wizards/space-station-14/blob/master/SECURITY.md
|
||||
about: Please report security vulnerabilities privately so we can fix them before they are publicly disclosed.
|
||||
- name: Request a Feature
|
||||
url: https://discord.gg/rGvu9hKffJ
|
||||
about: Submit feature requests on our Discord server (https://discord.gg/rGvu9hKffJ).
|
||||
20
.github/ISSUE_TEMPLATE/issue_report.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/issue_report.md
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
---
|
||||
name: Report an Issue
|
||||
about: "..."
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
## Description
|
||||
<!-- Explain your issue in detail. Issues without proper explanation are liable to be closed by maintainers. -->
|
||||
|
||||
**Reproduction**
|
||||
<!-- Include the steps to reproduce if applicable. -->
|
||||
|
||||
**Screenshots**
|
||||
<!-- If applicable, add screenshots to help explain your problem. -->
|
||||
|
||||
**Additional context**
|
||||
<!-- Add any other context about the problem here. Anything you think is related to the issue. -->
|
||||
18
.github/ISSUE_TEMPLATE/toolshed-feature-request.md
vendored
Normal file
18
.github/ISSUE_TEMPLATE/toolshed-feature-request.md
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
---
|
||||
name: Toolshed feature request
|
||||
about: Suggest a feature for Toolshed (for game admins/developers)
|
||||
title: "[TOOLSHED REQUEST]"
|
||||
labels: Toolshed
|
||||
assignees: moonheart08
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem/bug? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the command you'd like**
|
||||
A clear and concise description of what you want and what it should do.
|
||||
If you're a technical user (i.e. programmer) including type signatures is helpful.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
39
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
39
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
**ОПИСАНИЕ**
|
||||
|
||||
<!--здесь вы кратко описываете суть изменений, просто чтобы понимать, о чем ПР-->
|
||||
|
||||
**ИСТОЧНИК**
|
||||
|
||||
<!--заполняется в виде ссылки, если ПР портирован из другой сборки, в ином случае - удалить-->
|
||||
|
||||
<details>
|
||||
<summary>МЕДИА</summary>
|
||||
|
||||
-
|
||||
</details>
|
||||
|
||||
<!--сюда вы прикладываете скриншоты и видео, пустая строка промежуточная обязательна-->
|
||||
|
||||
<details>
|
||||
<summary>КАК ПРОВЕРИТЬ</summary>
|
||||
|
||||
- [ ]
|
||||
</details>
|
||||
|
||||
<!--здесь вы прописываете, какие действия необходимо провести, чтобы проверить ПР, пустая строка промежуточная обязательна-->
|
||||
|
||||
**ЧЕЙНДЖЛОГ**
|
||||
|
||||
:cl:
|
||||
- add:
|
||||
- remove:
|
||||
- tweak:
|
||||
- fix:
|
||||
|
||||
<!--здесь вы прописываете чейнджлог, который пойдет в наш дискорд, так что необходимо все расписывать подробно, например:
|
||||
:cl: Lemird
|
||||
- add: возвращен трусомат, ищите его в карго на каждой станции!
|
||||
- remove: удален трусомат
|
||||
- tweak: изменен состав трусомата
|
||||
- fix: пофикшены баги трусомата
|
||||
-->
|
||||
9
.github/config.yml
vendored
Normal file
9
.github/config.yml
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
Project-Condor:
|
||||
org: space-wizards
|
||||
project: 2
|
||||
inbox: Inbox
|
||||
labels:
|
||||
"W: In Progress": "In Progress"
|
||||
"W: Discussion": "Design and Discussion"
|
||||
"W: Backlog": "Backlog"
|
||||
"W: Next": "Next"
|
||||
81
.github/labeler.yml
vendored
Normal file
81
.github/labeler.yml
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
#"Map":
|
||||
# - changed-files:
|
||||
# - any-glob-to-any-file: "Resources/Maps/**/*.yml" # All .yml files in the Resources/Maps directory, recursive.
|
||||
|
||||
"Map-Admin":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "Resources/Maps/_NF/Admin/**/*.yml" # Grid Files
|
||||
|
||||
"Map-Bluespace":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "Resources/Maps/_NF/Bluespace/**/*.yml" # Grid Files
|
||||
|
||||
"Map-Dungeon":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "Resources/Maps/_NF/Dungeon/**/*.yml" # Grid Files
|
||||
|
||||
"Map-Outpost":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "Resources/Maps/_NF/Outpost/*.yml" # Map Files
|
||||
- "Resources/Prototypes/_NF/Maps/Outpost/*.yml" # Prototypes Files
|
||||
|
||||
"Map-Shuttle":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "Resources/Maps/_NF/Shuttles/**/*.yml" # Grid Files
|
||||
- "Resources/Prototypes/_NF/Shipyard/**/*.yml" # Prototypes Files
|
||||
|
||||
"Map-POI":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "Resources/Maps/_NF/POI/**/*.yml" # Grid Files
|
||||
- "Resources/Prototypes/_NF/Maps/POI/**/*.yml" # Prototypes Files
|
||||
|
||||
"Sprites":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "**/*.rsi/*.png"
|
||||
- "**/*.rsi/*.json"
|
||||
|
||||
"UI":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "**/*.xaml*"
|
||||
|
||||
"C#":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "**/*.cs"
|
||||
|
||||
"Shaders":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: '**/*.swsl'
|
||||
|
||||
"Audio":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: '**/*.ogg'
|
||||
|
||||
"No C#":
|
||||
- changed-files:
|
||||
- all-globs-to-all-files: "!**/*.cs"
|
||||
|
||||
"Docs":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "**/*.xml"
|
||||
- "**/*.md"
|
||||
|
||||
"FTL":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "Resources/Locale/**/*.ftl"
|
||||
|
||||
"YML":
|
||||
- all:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "**/*.yml"
|
||||
- all-globs-to-any-file:
|
||||
- "!Resources/Maps/_NF/**/*.yml"
|
||||
- "!Resources/Prototypes/Maps/_NF/**/*.yml"
|
||||
|
||||
"DB Migration":
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "Content.Server.Database/Migrations/**/*.cs"
|
||||
1
.github/mapchecker/.gitignore
vendored
Normal file
1
.github/mapchecker/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
venv
|
||||
73
.github/mapchecker/README.md
vendored
Normal file
73
.github/mapchecker/README.md
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
# MapChecker
|
||||
|
||||
This directory contains tooling contributed by TsjipTsjip, initially to automate the process of checking if map
|
||||
contributions in PR's are valid. That is to say, it collects a list of prototypes in the `Resources/Prototypes`
|
||||
directory which are marked as `DO NOT MAP`, `DEBUG`, ... and verifies that map changes indeed do not use them.
|
||||
|
||||
## Usage
|
||||
|
||||
Glad I do not have to write this myself! Get detailed help information by running:
|
||||
`python3 .github/mapchecker/mapchecker.py --help`
|
||||
|
||||
The following help block is printed:
|
||||
```
|
||||
usage: mapchecker.py [-h] [-v] [-p PROTOTYPES_PATH [PROTOTYPES_PATH ...]] [-m MAP_PATH [MAP_PATH ...]] [-w WHITELIST]
|
||||
|
||||
Map prototype usage checker for Frontier Station 14.
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
-v, --verbose Sets log level to DEBUG if present, spitting out a lot more information. False by default,.
|
||||
-p PROTOTYPES_PATH [PROTOTYPES_PATH ...], --prototypes_path PROTOTYPES_PATH [PROTOTYPES_PATH ...]
|
||||
Directory holding entity prototypes. Default: All entity prototypes in the Frontier Station 14 codebase.
|
||||
-m MAP_PATH [MAP_PATH ...], --map_path MAP_PATH [MAP_PATH ...]
|
||||
Map PROTOTYPES or directory of map prototypes to check. Can mix and match.Default: All maps in the Frontier Station 14 codebase.
|
||||
-w WHITELIST, --whitelist WHITELIST
|
||||
YML file that lists map names and prototypes to allow for them.
|
||||
```
|
||||
|
||||
You should generally not need to configure `-p`, `-m` or `-w`, as they are autofilled with sensible defaults. You can do
|
||||
this:
|
||||
- Set `-p` to only check against prototypes in a specific directory.
|
||||
- Set `-m` to just check a specific map. (Make sure to **point it at the prototype**, not the map file itself!)
|
||||
- Set `-v` with `-m` set as per above to get detailed information about a possible rejection for just that map.
|
||||
|
||||
## Configuration
|
||||
|
||||
Matchers are set in `config.py`. Currently it has a global list of matchers that are not allowed anywhere, and a set
|
||||
of conditional matchers.
|
||||
|
||||
For each map, a set of applicable matchers is constructed according to this workflow:
|
||||
1. Add all global illegal matchers.
|
||||
2. Add all conditional matchers for non-matching shipyard groups
|
||||
3. Remove all conditional matchers from the matching shipyard group (if it exists), to support duplicates across
|
||||
shipyard groups
|
||||
|
||||
A match will attempt to match the following during prototype collection:
|
||||
- Prototype ID (contains matcher, case insensitive)
|
||||
- Prototype name (contains matcher, case insensitive)
|
||||
- Prototype suffixes (separated per `, `) (exact, case insensitive)
|
||||
|
||||
## Whitelisting
|
||||
|
||||
If a map has a prototype and you believe it should be whitelisted, add a key for your map name (the `id` field of the
|
||||
gameMap prototype), and add the prototype ID's to its list.
|
||||
|
||||
The whitelist the checker uses by default is `.github/mapchecker/whitelist.yml`.
|
||||
|
||||
## Shuttle group override
|
||||
|
||||
It is possible that a shuttle is set to group `None` because it is only used in custom shipyard listings. In this case,
|
||||
you can force the MapChecker script to treat it as a different shipyard group by adding the following to the vessel
|
||||
prototype:
|
||||
|
||||
```yml
|
||||
...
|
||||
group: None
|
||||
# Add this line below.
|
||||
mapchecker_group_override: ShipyardGroupHere
|
||||
...
|
||||
```
|
||||
|
||||
Note that for now this will cause a warning to be generated, but it will not cause a failure if the shuttle matches the
|
||||
criteria for the overridden group.
|
||||
70
.github/mapchecker/config.py
vendored
Normal file
70
.github/mapchecker/config.py
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
# List of matchers that are always illegal to use. These always supercede CONDITIONALLY_ILLEGAL_MATCHES.
|
||||
ILLEGAL_MATCHES = [
|
||||
"DO NOT MAP",
|
||||
"DoNotMap",
|
||||
"DEBUG",
|
||||
"Admeme",
|
||||
"CaptainSabre",
|
||||
"ClothingBeltSheath",
|
||||
"MagazinePistolHighCapacity",
|
||||
"MagazinePistolHighCapacityRubber",
|
||||
"EncryptionKeyCommand",
|
||||
"SurveillanceCameraWireless",
|
||||
"CrewMonitoringServer",
|
||||
"APCHighCapacity",
|
||||
"APCSuperCapacity",
|
||||
"APCHyperCapacity",
|
||||
"PDA",
|
||||
"SpawnPointPassenger",
|
||||
"Python",
|
||||
"SalvageShuttleMarker",
|
||||
"FTLPoint",
|
||||
]
|
||||
# List of specific legal entities that override the above. Does not check suffixes.
|
||||
LEGAL_OVERRIDES = [
|
||||
"ButtonFrameCautionSecurity", # red button
|
||||
"PosterLegitPDAAd",
|
||||
"ShowcaseRobot" # decoration
|
||||
]
|
||||
# List of matchers that are illegal to use, unless the map is a ship and the ship belongs to the keyed shipyard.
|
||||
CONDITIONALLY_ILLEGAL_MATCHES = {
|
||||
"Shipyard": [
|
||||
],
|
||||
"Scrap": [
|
||||
],
|
||||
"Expedition": [
|
||||
],
|
||||
"Custom": [
|
||||
],
|
||||
"Security": [ # These matchers are illegal unless the ship is part of the security shipyard.
|
||||
"Security", # Anything with the word security in it should also only be appearing on security ships.
|
||||
"Plastitanium", # Plastitanium walls should only be appearing on security ships.
|
||||
"Kammerer", # Opportunity
|
||||
"HighSecDoor",
|
||||
"ShuttleGun",
|
||||
],
|
||||
"Syndicate": [
|
||||
"Plastitanium", # And also on blackmarket ships cause syndicate.
|
||||
"ShuttleGun",
|
||||
],
|
||||
"BlackMarket": [
|
||||
"Plastitanium", # And also on blackmarket ships cause syndicate.
|
||||
"ShuttleGun",
|
||||
],
|
||||
"Sr": [
|
||||
],
|
||||
"Medical": [
|
||||
],
|
||||
# It is assumed that mapped instances of plastitanium, security gear, etc. are deemed acceptable
|
||||
"PointOfInterest": [
|
||||
"Plastitanium",
|
||||
"Security",
|
||||
"HighSecDoor",
|
||||
"WallPlastitaniumIndestructible",
|
||||
"WallPlastitaniumDiagonalIndestructible",
|
||||
"PlastititaniumWindowIndestructible",
|
||||
"PlastititaniumWindowDiagonalIndestructible",
|
||||
"ClosetMaintenanceFilledRandom",
|
||||
"ClosetWallMaintenanceFilledRandom",
|
||||
]
|
||||
}
|
||||
279
.github/mapchecker/mapchecker.py
vendored
Executable file
279
.github/mapchecker/mapchecker.py
vendored
Executable file
@ -0,0 +1,279 @@
|
||||
#! /bin/python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import yaml
|
||||
from typing import List, Dict
|
||||
|
||||
from util import get_logger, YamlLoaderIgnoringTags, check_prototype
|
||||
from config import CONDITIONALLY_ILLEGAL_MATCHES
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Set up argument parser.
|
||||
parser = argparse.ArgumentParser(description="Map prototype usage checker for Frontier Station 14.")
|
||||
parser.add_argument(
|
||||
"-v", "--verbose",
|
||||
action='store_true',
|
||||
help="Sets log level to DEBUG if present, spitting out a lot more information. False by default,."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--prototypes_path",
|
||||
help="Directory holding entity prototypes.\nDefault: All entity prototypes in the Frontier Station 14 codebase.",
|
||||
type=str,
|
||||
nargs="+", # We accept multiple directories, but need at least one.
|
||||
required=False,
|
||||
default=[
|
||||
"Resources/Prototypes/Entities", # Upstream
|
||||
"Resources/Prototypes/_NF/Entities", # NF
|
||||
"Resources/Prototypes/Nyanotrasen/Entities", # Nyanotrasen
|
||||
"Resources/Prototypes/_DV/Entities", # DeltaV
|
||||
]
|
||||
)
|
||||
parser.add_argument(
|
||||
"-m", "--map_path",
|
||||
help=(f"Map PROTOTYPES or directory of map prototypes to check. Can mix and match."
|
||||
f"Default: All maps in the Frontier Station 14 codebase."),
|
||||
type=str,
|
||||
nargs="+", # We accept multiple pathspecs, but need at least one.
|
||||
required=False,
|
||||
default=[
|
||||
"Resources/Prototypes/_NF/Maps/Outpost", # Frontier Outpost
|
||||
"Resources/Prototypes/_NF/PointsOfInterest", # Points of interest
|
||||
"Resources/Prototypes/_NF/Shipyard", # Shipyard ships.
|
||||
]
|
||||
)
|
||||
parser.add_argument(
|
||||
"-w", "--whitelist",
|
||||
help="YML file that lists map names and prototypes to allow for them.",
|
||||
type=str, # Using argparse.FileType here upsets os.isfile, we work around this.
|
||||
nargs=1,
|
||||
required=False,
|
||||
default=".github/mapchecker/whitelist.yml"
|
||||
)
|
||||
|
||||
# ==================================================================================================================
|
||||
# PHASE 0: Parse arguments and transform them into lists of files to work on.
|
||||
args = parser.parse_args()
|
||||
|
||||
# Set up logging session.
|
||||
logger = get_logger(args.verbose)
|
||||
logger.info("MapChecker starting up.")
|
||||
logger.debug("Verbosity enabled.")
|
||||
|
||||
# Set up argument collectors.
|
||||
proto_paths: List[str] = []
|
||||
map_proto_paths: List[str] = []
|
||||
whitelisted_protos: Dict[str, List[str]] = dict()
|
||||
whitelisted_maps: List[str] = []
|
||||
|
||||
# Validate provided arguments and collect file locations.
|
||||
for proto_path in args.prototypes_path: # All prototype paths must be directories.
|
||||
if os.path.isdir(proto_path) is False:
|
||||
logger.warning(f"Prototype path '{proto_path}' is not a directory. Continuing without it.")
|
||||
continue
|
||||
# Collect all .yml files in this directory.
|
||||
for root, dirs, files in os.walk(proto_path):
|
||||
for file in files:
|
||||
if file.endswith(".yml"):
|
||||
proto_paths.append(str(os.path.join(root, file)))
|
||||
for map_path in args.map_path: # All map paths must be files or directories.
|
||||
if os.path.isfile(map_path):
|
||||
# If it's a file, we just add it to the list.
|
||||
map_proto_paths.append(map_path)
|
||||
elif os.path.isdir(map_path):
|
||||
# If it's a directory, we add all .yml files in it to the list.
|
||||
for root, dirs, files in os.walk(map_path):
|
||||
for file in files:
|
||||
if file.endswith(".yml"):
|
||||
map_proto_paths.append(os.path.join(root, file))
|
||||
else:
|
||||
logger.warning(f"Map path '{map_path}' is not a file or directory. Continuing without it.")
|
||||
continue
|
||||
|
||||
# Validate whitelist, it has to be a file containing valid yml.
|
||||
if os.path.isfile(args.whitelist) is False:
|
||||
logger.warning(f"Whitelist '{args.whitelist}' is not a file. Continuing without it.")
|
||||
else:
|
||||
with open(args.whitelist, "r") as whitelist:
|
||||
file_data = yaml.load(whitelist, Loader=YamlLoaderIgnoringTags)
|
||||
if file_data is None:
|
||||
logger.warning(f"Whitelist '{args.whitelist}' is empty. Continuing without it.")
|
||||
else:
|
||||
for map_key in file_data:
|
||||
if file_data[map_key] is True:
|
||||
whitelisted_maps.append(map_key)
|
||||
elif file_data[map_key] is False:
|
||||
continue
|
||||
else:
|
||||
whitelisted_protos[map_key] = file_data[map_key]
|
||||
|
||||
# ==================================================================================================================
|
||||
# PHASE 1: Collect all prototypes in proto_paths that are suffixed with target suffixes.
|
||||
|
||||
# Set up collectors.
|
||||
illegal_prototypes: List[str] = list()
|
||||
conditionally_illegal_prototypes: Dict[str, List[str]] = dict()
|
||||
for key in CONDITIONALLY_ILLEGAL_MATCHES.keys(): # Ensure all keys have empty lists already, less work later.
|
||||
conditionally_illegal_prototypes[key] = list()
|
||||
|
||||
# Collect all prototypes and sort into the collectors.
|
||||
for proto_file in proto_paths:
|
||||
with open(proto_file, "r") as proto:
|
||||
logger.debug(f"Reading prototype file '{proto_file}'.")
|
||||
file_data = yaml.load(proto, Loader=YamlLoaderIgnoringTags)
|
||||
if file_data is None:
|
||||
continue
|
||||
|
||||
for item in file_data: # File data has blocks of things we need.
|
||||
if item["type"] != "entity":
|
||||
continue
|
||||
proto_id = item["id"]
|
||||
proto_name = item["name"] if "name" in item.keys() else ""
|
||||
if proto_name is None:
|
||||
proto_name = ""
|
||||
proto_suffixes = str(item["suffix"]).split(", ") if "suffix" in item.keys() else list()
|
||||
proto_categories = item["categories"] if "categories" in item.keys() else list()
|
||||
if proto_categories is None:
|
||||
proto_categories = list()
|
||||
|
||||
check_result = check_prototype(proto_id, proto_name, proto_suffixes, proto_categories)
|
||||
if check_result is False:
|
||||
illegal_prototypes.append(proto_id)
|
||||
elif check_result is not True:
|
||||
for key in check_result:
|
||||
conditionally_illegal_prototypes[key].append(proto_id)
|
||||
|
||||
# Log information.
|
||||
logger.info(f"Collected {len(illegal_prototypes)} illegal prototype matchers.")
|
||||
for key in conditionally_illegal_prototypes.keys():
|
||||
logger.info(f"Collected {len(conditionally_illegal_prototypes[key])} illegal prototype matchers, whitelisted "
|
||||
f"for shipyard group {key}.")
|
||||
for item in conditionally_illegal_prototypes[key]:
|
||||
logger.debug(f" - {item}")
|
||||
|
||||
# ==================================================================================================================
|
||||
# PHASE 2: Check all maps in map_proto_paths for illegal prototypes.
|
||||
|
||||
# Set up collectors.
|
||||
violations: Dict[str, List[str]] = dict()
|
||||
|
||||
# Check all maps for illegal prototypes.
|
||||
for map_proto in map_proto_paths:
|
||||
with open(map_proto, "r") as map:
|
||||
file_data = yaml.load(map, Loader=YamlLoaderIgnoringTags)
|
||||
if file_data is None:
|
||||
logger.warning(f"Map prototype '{map_proto}' is empty. Continuing without it.")
|
||||
continue
|
||||
|
||||
map_name = map_proto # The map name that will be reported over output.
|
||||
map_file_location = None
|
||||
shipyard_group = None # Shipyard group of this map, if it's a shuttle.
|
||||
# Shipyard override of this map, in the case it's a custom shipyard shuttle but needs to be treated as a
|
||||
# specific group.
|
||||
shipyard_override = None
|
||||
|
||||
# FIXME: this breaks down with multiple descriptions in one file.
|
||||
for item in file_data:
|
||||
if item["type"] == "gameMap":
|
||||
# This yaml entry is the map descriptor. Collect its file location and map name.
|
||||
if "id" in item.keys():
|
||||
map_name = item["id"]
|
||||
map_file_location = item["mapPath"] if "mapPath" in item.keys() else None
|
||||
elif item["type"] == "vessel":
|
||||
# This yaml entry is a vessel descriptor!
|
||||
shipyard_group = item["group"] if "group" in item.keys() else None
|
||||
shipyard_override = item["mapchecker_group_override"] if "mapchecker_group_override" in item.keys() else None
|
||||
elif item["type"] == "pointOfInterest":
|
||||
shipyard_group = "PointOfInterest"
|
||||
shipyard_override = item["mapchecker_group_override"] if "mapchecker_group_override" in item.keys() else None
|
||||
|
||||
if map_file_location is None:
|
||||
# Silently skip. If the map doesn't have a mapPath, it won't appear in game anyways.
|
||||
logger.debug(f"Map proto {map_proto} did not specify a map file location. Skipping.")
|
||||
continue
|
||||
|
||||
# CHECKPOINT - If the map_name is blanket-whitelisted, skip it, but log a warning.
|
||||
if map_name in whitelisted_maps:
|
||||
logger.warning(f"Map '{map_name}' (from prototype '{map_proto}') was blanket-whitelisted. Skipping it.")
|
||||
continue
|
||||
|
||||
if shipyard_override is not None:
|
||||
# Log a warning, indicating the override and the normal group this shuttle belongs to, then set
|
||||
# shipyard_group to the override.
|
||||
logger.warning(f"Map '{map_name}' (from prototype '{map_proto}') is using mapchecker_group_override. "
|
||||
f"This map will be treated as a '{shipyard_override}' shuttle. (Normally: "
|
||||
f"'{shipyard_group}'))")
|
||||
shipyard_group = shipyard_override
|
||||
|
||||
logger.debug(f"Starting checks for '{map_name}' (Path: '{map_file_location}' | Shipyard: '{shipyard_group}')")
|
||||
|
||||
# Now construct a temporary list of all prototype ID's that are illegal for this map based on conditionals.
|
||||
conditional_checks = set() # Make a set of it. That way we get no duplicates.
|
||||
for key in conditionally_illegal_prototypes.keys():
|
||||
if shipyard_group != key:
|
||||
for item in conditionally_illegal_prototypes[key]:
|
||||
conditional_checks.add(item)
|
||||
# Remove the ones that do match, if they exist.
|
||||
if shipyard_group is not None and shipyard_group in conditionally_illegal_prototypes.keys():
|
||||
for check in conditionally_illegal_prototypes[shipyard_group]:
|
||||
if check in conditional_checks:
|
||||
conditional_checks.remove(check)
|
||||
|
||||
logger.debug(f"Conditional checks for {map_name} after removal of shipyard dups: {conditional_checks}")
|
||||
|
||||
# Now we check the map file for these illegal prototypes. I'm being lazy here and just matching against the
|
||||
# entire file contents, without loading YAML at all. This is fine, because this job only runs after
|
||||
# Content.YamlLinter runs. TODO: It does not.
|
||||
with open("Resources" + map_file_location, "r") as map_file:
|
||||
map_file_contents = map_file.read()
|
||||
for check in illegal_prototypes:
|
||||
# Wrap in 'proto: ' and '\n' here, to ensure we only match actual prototypes, not 'part of word'
|
||||
# prototypes. Example: SignSec is a prefix of SignSecureMed
|
||||
if 'proto: ' + check + '\n' in map_file_contents:
|
||||
if violations.get(map_name) is None:
|
||||
violations[map_name] = list()
|
||||
violations[map_name].append(check)
|
||||
for check in conditional_checks:
|
||||
if 'proto: ' + check + '\n' in map_file_contents:
|
||||
if violations.get(map_name) is None:
|
||||
violations[map_name] = list()
|
||||
violations[map_name].append(check)
|
||||
|
||||
# ==================================================================================================================
|
||||
# PHASE 3: Filtering findings and reporting.
|
||||
logger.debug(f"Violations aggregator before whitelist processing: {violations}")
|
||||
|
||||
# Filter out all prototypes that are whitelisted.
|
||||
for key in whitelisted_protos.keys():
|
||||
if violations.get(key) is None:
|
||||
continue
|
||||
|
||||
for whitelisted_proto in whitelisted_protos[key]:
|
||||
if whitelisted_proto in violations[key]:
|
||||
violations[key].remove(whitelisted_proto)
|
||||
|
||||
logger.debug(f"Violations aggregator after whitelist processing: {violations}")
|
||||
|
||||
# Some maps had all their violations whitelisted. Remove them from the count.
|
||||
total_map_violations = len([viol for viol in violations.keys() if len(violations[viol]) > 0])
|
||||
|
||||
# Report findings to output, on the ERROR loglevel, so they stand out in Github actions output.
|
||||
if total_map_violations > 0:
|
||||
logger.error(f"Found {total_map_violations} maps with illegal prototypes.")
|
||||
for key in violations.keys():
|
||||
if len(violations[key]) == 0:
|
||||
# If the map has no violations at this point, it's because all of its violations were whitelisted.
|
||||
# Don't include them in the report.
|
||||
continue
|
||||
|
||||
logger.error(f"Map '{key}' has {len(violations[key])} illegal prototypes.")
|
||||
for violation in violations[key]:
|
||||
logger.error(f" - {violation}")
|
||||
else:
|
||||
logger.info("No illegal prototypes found in any maps.")
|
||||
|
||||
logger.info(f"MapChecker finished{' with errors' if total_map_violations > 0 else ''}.")
|
||||
if total_map_violations > 0:
|
||||
exit(1)
|
||||
else:
|
||||
exit(0)
|
||||
1
.github/mapchecker/requirements.txt
vendored
Normal file
1
.github/mapchecker/requirements.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
PyYAML==6.0.1
|
||||
105
.github/mapchecker/util.py
vendored
Normal file
105
.github/mapchecker/util.py
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
import logging
|
||||
|
||||
from yaml import SafeLoader
|
||||
from typing import List, Union
|
||||
from logging import Logger, getLogger
|
||||
|
||||
from config import ILLEGAL_MATCHES, LEGAL_OVERRIDES, CONDITIONALLY_ILLEGAL_MATCHES
|
||||
|
||||
|
||||
def get_logger(debug: bool = False) -> Logger:
|
||||
"""
|
||||
Gets a logger for use by MapChecker.
|
||||
|
||||
:return: A logger.
|
||||
"""
|
||||
logger = getLogger("MapChecker")
|
||||
logger.setLevel("DEBUG" if debug else "INFO")
|
||||
|
||||
sh = logging.StreamHandler()
|
||||
formatter = logging.Formatter(
|
||||
"[%(asctime)s %(levelname)7s] %(message)s",
|
||||
datefmt='%Y-%m-%d %H:%M:%S'
|
||||
)
|
||||
sh.setFormatter(formatter)
|
||||
logger.addHandler(sh)
|
||||
|
||||
return logger
|
||||
|
||||
|
||||
# Snippet taken from https://stackoverflow.com/questions/33048540/pyyaml-safe-load-how-to-ignore-local-tags
|
||||
class YamlLoaderIgnoringTags(SafeLoader):
|
||||
def ignore_unknown(self, node):
|
||||
return None
|
||||
|
||||
|
||||
YamlLoaderIgnoringTags.add_constructor(None, YamlLoaderIgnoringTags.ignore_unknown)
|
||||
# End of snippet
|
||||
|
||||
|
||||
def check_prototype(proto_id: str, proto_name: str, proto_suffixes: List[str], proto_categories: List[str]) -> Union[bool, List[str]]:
|
||||
"""
|
||||
Checks prototype information against the ILLEGAL_MATCHES and CONDITIONALLY_ILLEGAL_MATCHES constants.
|
||||
|
||||
:param proto_id: The prototype's ID.
|
||||
:param proto_name: The prototype's name.
|
||||
:param proto_suffixes: The prototype's suffixes.
|
||||
:param proto_categories: The prototype's categories.
|
||||
:return:
|
||||
- True if the prototype is legal
|
||||
- False if the prototype is globally illegal (matched by ILLEGAL_MATCHES)
|
||||
- A list of shipyard keys if the prototype is conditionally illegal (matched by CONDITIONALLY_ILLEGAL_MATCHES)
|
||||
"""
|
||||
# Check against LEGAL_OVERRIDES (no suffix!)
|
||||
for legal_match in LEGAL_OVERRIDES:
|
||||
if legal_match.lower() in proto_name.lower():
|
||||
return True
|
||||
|
||||
if legal_match.lower() in proto_id.lower():
|
||||
return True
|
||||
|
||||
# Check against ILLEGAL_MATCHES.
|
||||
for illegal_match in ILLEGAL_MATCHES:
|
||||
if illegal_match.lower() in proto_name.lower():
|
||||
return False
|
||||
|
||||
if illegal_match.lower() in proto_id.lower():
|
||||
return False
|
||||
|
||||
for suffix in proto_suffixes:
|
||||
if illegal_match.lower() == suffix.lower():
|
||||
return False
|
||||
|
||||
for category in proto_categories:
|
||||
if illegal_match.lower() == category.lower():
|
||||
return False
|
||||
|
||||
# Check against CONDITIONALLY_ILLEGAL_MATCHES.
|
||||
conditionally_illegal_keys = list()
|
||||
for key in CONDITIONALLY_ILLEGAL_MATCHES.keys():
|
||||
|
||||
cond_illegal_matches = CONDITIONALLY_ILLEGAL_MATCHES[key]
|
||||
for cond_illegal_match in cond_illegal_matches:
|
||||
|
||||
if cond_illegal_match.lower() in proto_name.lower():
|
||||
conditionally_illegal_keys.append(key)
|
||||
break
|
||||
|
||||
if cond_illegal_match.lower() in proto_id.lower():
|
||||
conditionally_illegal_keys.append(key)
|
||||
break
|
||||
|
||||
for suffix in proto_suffixes:
|
||||
if cond_illegal_match.lower() == suffix.lower():
|
||||
conditionally_illegal_keys.append(key)
|
||||
break
|
||||
|
||||
for category in proto_categories:
|
||||
if cond_illegal_match.lower() == category.lower():
|
||||
conditionally_illegal_keys.append(key)
|
||||
break
|
||||
|
||||
if len(conditionally_illegal_keys) > 0:
|
||||
return conditionally_illegal_keys
|
||||
|
||||
return True
|
||||
10
.github/mapchecker/whitelist.yml
vendored
Normal file
10
.github/mapchecker/whitelist.yml
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
# POI's
|
||||
Frontier: true
|
||||
Nfsd: true
|
||||
LPBravo: true
|
||||
Cove: true
|
||||
|
||||
# TECHNICAL DEBT BELOW. These ones were added to this list to ensure other PR's would not break upon merging. It is
|
||||
# the intention for this list to become empty in separate PR's.
|
||||
Tinnia:
|
||||
- RubberStampSyndicate # existing meme
|
||||
185
.github/rsi-schema.json
vendored
Normal file
185
.github/rsi-schema.json
vendored
Normal file
@ -0,0 +1,185 @@
|
||||
{
|
||||
"$schema":"http://json-schema.org/draft-07/schema",
|
||||
"default":{
|
||||
|
||||
},
|
||||
"description":"JSON Schema for SS14 RSI validation.",
|
||||
"examples":[
|
||||
{
|
||||
"version":1,
|
||||
"license":"CC-BY-SA-3.0",
|
||||
"copyright":"Taken from CODEBASE at COMMIT LINK",
|
||||
"size":{
|
||||
"x":32,
|
||||
"y":32
|
||||
},
|
||||
"states":[
|
||||
{
|
||||
"name":"basic"
|
||||
},
|
||||
{
|
||||
"name":"basic-directions",
|
||||
"directions":4
|
||||
},
|
||||
{
|
||||
"name":"basic-delays",
|
||||
"delays":[
|
||||
[
|
||||
0.1,
|
||||
0.1
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"name":"basic-delays-directions",
|
||||
"directions":4,
|
||||
"delays":[
|
||||
[
|
||||
0.1,
|
||||
0.1
|
||||
],
|
||||
[
|
||||
0.1,
|
||||
0.1
|
||||
],
|
||||
[
|
||||
0.1,
|
||||
0.1
|
||||
],
|
||||
[
|
||||
0.1,
|
||||
0.1
|
||||
]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"required":[
|
||||
"version",
|
||||
"license",
|
||||
"copyright",
|
||||
"size",
|
||||
"states"
|
||||
],
|
||||
"title":"RSI Schema",
|
||||
"type":"object",
|
||||
"properties":{
|
||||
"version":{
|
||||
"$id":"#/properties/version",
|
||||
"default":"",
|
||||
"description":"RSI version integer.",
|
||||
"title":"The version schema",
|
||||
"type":"integer"
|
||||
},
|
||||
"license":{
|
||||
"$id":"#/properties/license",
|
||||
"default":"",
|
||||
"description":"The license for the associated icon states. Restricted to SS14-compatible asset licenses.",
|
||||
"enum":[
|
||||
"CC-BY-SA-3.0",
|
||||
"CC-BY-SA-4.0",
|
||||
"CC-BY-NC-3.0",
|
||||
"CC-BY-NC-4.0",
|
||||
"CC-BY-NC-SA-3.0",
|
||||
"CC-BY-NC-SA-4.0",
|
||||
"CC0-1.0"
|
||||
],
|
||||
"examples":[
|
||||
"CC-BY-SA-3.0"
|
||||
],
|
||||
"title":"License",
|
||||
"type":"string"
|
||||
},
|
||||
"copyright":{
|
||||
"$id":"#/properties/copyright",
|
||||
"type":"string",
|
||||
"title":"Copyright Info",
|
||||
"description":"The copyright holder. This is typically a link to the commit of the codebase that the icon is pulled from.",
|
||||
"default":"",
|
||||
"examples":[
|
||||
"Taken from CODEBASE at COMMIT LINK"
|
||||
]
|
||||
},
|
||||
"size":{
|
||||
"$id":"#/properties/size",
|
||||
"default":{
|
||||
|
||||
},
|
||||
"description":"The dimensions of the sprites inside the RSI. This is not the size of the PNG files that store the sprite sheet.",
|
||||
"examples":[
|
||||
{
|
||||
"x":32,
|
||||
"y":32
|
||||
}
|
||||
],
|
||||
"title":"Sprite Dimensions",
|
||||
"required":[
|
||||
"x",
|
||||
"y"
|
||||
],
|
||||
"type":"object",
|
||||
"properties":{
|
||||
"x":{
|
||||
"$id":"#/properties/size/properties/x",
|
||||
"type":"integer",
|
||||
"default":32,
|
||||
"examples":[
|
||||
32
|
||||
]
|
||||
},
|
||||
"y":{
|
||||
"$id":"#/properties/size/properties/y",
|
||||
"type":"integer",
|
||||
"default":32,
|
||||
"examples":[
|
||||
32
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties":true
|
||||
},
|
||||
"states":{
|
||||
"$id":"#/properties/states",
|
||||
"type":"array",
|
||||
"title":"Icon States",
|
||||
"description":"Metadata for icon states. Includes name, directions, delays, etc.",
|
||||
"default":[
|
||||
|
||||
],
|
||||
"examples":[
|
||||
[
|
||||
{
|
||||
"name":"basic"
|
||||
},
|
||||
{
|
||||
"name":"basic-directions",
|
||||
"directions":4
|
||||
}
|
||||
]
|
||||
],
|
||||
"additionalItems":true,
|
||||
"items":{
|
||||
"$id":"#/properties/states/items",
|
||||
"type":"object",
|
||||
"required":[
|
||||
"name"
|
||||
],
|
||||
"properties":{
|
||||
"name":{
|
||||
"type":"string"
|
||||
},
|
||||
"directions":{
|
||||
"type":"integer",
|
||||
"enum":[
|
||||
1,
|
||||
4,
|
||||
8
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties":true
|
||||
}
|
||||
47
.github/workflows/benchmarks.yml
vendored
Normal file
47
.github/workflows/benchmarks.yml
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
name: Benchmarks
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 8 * * *'
|
||||
|
||||
concurrency: benchmarks
|
||||
|
||||
jobs:
|
||||
benchmark:
|
||||
name: Run Benchmarks
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
- name: Get Engine version
|
||||
run: |
|
||||
cd RobustToolbox
|
||||
git fetch --depth=1
|
||||
echo "::set-output name=out::$(git rev-parse HEAD)"
|
||||
id: engine_version
|
||||
- name: Run script on centcomm
|
||||
uses: appleboy/ssh-action@master
|
||||
with:
|
||||
host: centcomm.spacestation14.io
|
||||
username: robust-benchmark-runner
|
||||
key: ${{ secrets.CENTCOMM_ROBUST_BENCHMARK_RUNNER_KEY }}
|
||||
command_timeout: 100000m
|
||||
script: |
|
||||
mkdir benchmark_run_content_${{ github.sha }}
|
||||
cd benchmark_run_content_${{ github.sha }}
|
||||
git clone https://github.com/space-wizards/space-station-14.git repo_dir --recursive
|
||||
cd repo_dir
|
||||
git checkout ${{ github.sha }}
|
||||
cd Content.Benchmarks
|
||||
dotnet restore
|
||||
export ROBUST_BENCHMARKS_ENABLE_SQL=1
|
||||
export ROBUST_BENCHMARKS_SQL_ADDRESS="${{ secrets.BENCHMARKS_WRITE_ADDRESS }}"
|
||||
export ROBUST_BENCHMARKS_SQL_PORT="${{ secrets.BENCHMARKS_WRITE_PORT }}"
|
||||
export ROBUST_BENCHMARKS_SQL_USER="${{ secrets.BENCHMARKS_WRITE_USER }}"
|
||||
export ROBUST_BENCHMARKS_SQL_PASSWORD="${{ secrets.BENCHMARKS_WRITE_PASSWORD }}"
|
||||
export ROBUST_BENCHMARKS_SQL_DATABASE="content_benchmarks"
|
||||
export GITHUB_SHA="${{ github.sha }}"
|
||||
dotnet run --filter '*' --configuration Release
|
||||
cd ../../..
|
||||
rm -rf benchmark_run_content_${{ github.sha }}
|
||||
41
.github/workflows/build-docfx.yml
vendored
Normal file
41
.github/workflows/build-docfx.yml
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
name: Build & Publish Docfx
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 0"
|
||||
|
||||
jobs:
|
||||
docfx:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Setup submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Project
|
||||
run: dotnet build --no-restore /p:WarningsAsErrors=nullable
|
||||
|
||||
- name: Build DocFX
|
||||
uses: nikeee/docfx-action@v1.0.0
|
||||
with:
|
||||
args: Content.Docfx/docfx.json
|
||||
|
||||
- name: Publish Docfx Documentation on GitHub Pages
|
||||
uses: maxheld83/ghpages@master
|
||||
env:
|
||||
BUILD_DIR: Content.Docfx/_content-site
|
||||
GH_PAT: ${{ secrets.GH_PAT }}
|
||||
57
.github/workflows/build-map-renderer.yml
vendored
Normal file
57
.github/workflows/build-map-renderer.yml
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
name: Build & Test Map Renderer
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
branches: [ master, staging, stable ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false && github.actor != 'FrontierATC' # Frontier
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Checkout Master
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Setup Submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Project
|
||||
run: dotnet build Content.MapRenderer --configuration Release --no-restore /p:WarningsAsErrors=nullable /m
|
||||
|
||||
- name: Run Map Renderer
|
||||
run: dotnet run --project Content.MapRenderer Dev
|
||||
|
||||
ci-success:
|
||||
name: Build & Test Debug
|
||||
needs:
|
||||
- build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: CI succeeded
|
||||
run: exit 0
|
||||
62
.github/workflows/build-test-debug.yml
vendored
Normal file
62
.github/workflows/build-test-debug.yml
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
name: Build & Test Debug
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
branches: [ master, staging, stable ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false && github.actor != 'FrontierATC' # Frontier
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Checkout Master
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Setup Submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Project
|
||||
run: dotnet build --configuration DebugOpt --no-restore /p:WarningsAsErrors=nullable /m
|
||||
|
||||
- name: Run Content.Tests
|
||||
run: dotnet test --no-build --configuration DebugOpt Content.Tests/Content.Tests.csproj -- NUnit.ConsoleOut=0
|
||||
|
||||
- name: Run Content.IntegrationTests
|
||||
shell: pwsh
|
||||
run: |
|
||||
$env:DOTNET_gcServer=1
|
||||
dotnet test --no-build --configuration DebugOpt Content.IntegrationTests/Content.IntegrationTests.csproj --filter "FullyQualifiedName!~ShipyardTest" -- NUnit.ConsoleOut=0 NUnit.MapWarningTo=Failed
|
||||
ci-success:
|
||||
name: Build & Test Debug
|
||||
needs:
|
||||
- build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: CI succeeded
|
||||
run: exit 0
|
||||
55
.github/workflows/changelog.yml
vendored
Normal file
55
.github/workflows/changelog.yml
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
name: PR Changelogs
|
||||
concurrency: commit_action
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }}
|
||||
CHANGELOG_DIR: Resources/Changelog/Frontier.yml # Frontier: Changelog.yml<Frontier.yml
|
||||
PR_NUMBER: ${{ github.event.number }}
|
||||
|
||||
jobs:
|
||||
changelog:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout Master
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN }}
|
||||
ref: master
|
||||
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global user.name "${{ vars.CHANGELOG_USER }}"
|
||||
git config --global user.email "${{ vars.CHANGELOG_EMAIL }}"
|
||||
shell: bash
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
cd "Tools/_NF/changelog"
|
||||
npm install
|
||||
shell: bash
|
||||
|
||||
- name: Generate Changelog
|
||||
run: |
|
||||
cd "Tools/_NF/changelog"
|
||||
node changelog.js
|
||||
shell: bash
|
||||
|
||||
- name: Commit Changelog
|
||||
run: |
|
||||
git pull origin master
|
||||
git add *.yml
|
||||
git commit -m "${{ vars.CHANGELOG_MESSAGE }} (#${{ env.PR_NUMBER }})"
|
||||
git push
|
||||
shell: bash
|
||||
continue-on-error: true
|
||||
15
.github/workflows/check-crlf.yml
vendored
Normal file
15
.github/workflows/check-crlf.yml
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
name: CRLF Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: CRLF Check
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Check for CRLF
|
||||
run: Tools/check_crlf.py
|
||||
54
.github/workflows/check-trailing-whitespace.yml
vendored
Normal file
54
.github/workflows/check-trailing-whitespace.yml
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
name: Trailing Whitespace Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Trailing Whitespace Check
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Get changed text files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v46.0.5
|
||||
with:
|
||||
files: |
|
||||
**.cs
|
||||
**.yml
|
||||
**.swsl
|
||||
**.json
|
||||
**.py
|
||||
- name: Check for trailing whitespace and EOF newline
|
||||
env:
|
||||
ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||
run: |
|
||||
has_trailing_whitespace=0
|
||||
has_missing_eof_newline=0
|
||||
|
||||
for file in ${ALL_CHANGED_FILES}; do
|
||||
echo "Checking $file"
|
||||
|
||||
# Check for trailing whitespace
|
||||
if grep -qP '[ \t]+$' "$file"; then
|
||||
echo "::error file=$file::Trailing whitespace found"
|
||||
has_trailing_whitespace=1
|
||||
fi
|
||||
|
||||
# Check for missing EOF newline
|
||||
if [ -f "$file" ] && [ -s "$file" ]; then
|
||||
last_char=$(tail -c 1 "$file")
|
||||
if [ "$last_char" != "" ] && [ "$last_char" != $'\n' ]; then
|
||||
echo "::error file=$file::Missing newline at end of file"
|
||||
has_missing_eof_newline=1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$has_trailing_whitespace" -eq 1 ] || [ "$has_missing_eof_newline" -eq 1 ]; then
|
||||
echo "Issues found: trailing whitespace or missing EOF newline."
|
||||
echo "We recommend using an IDE to prevent this from happening."
|
||||
exit 1
|
||||
fi
|
||||
27
.github/workflows/close-master-pr.yml
vendored
Normal file
27
.github/workflows/close-master-pr.yml
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
name: Close PRs on master
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [ opened, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{github.head_ref == 'master' || github.head_ref == 'main' || github.head_ref == 'develop'}}
|
||||
|
||||
steps:
|
||||
- uses: superbrothers/close-pull-request@v3
|
||||
with:
|
||||
comment: "Thank you for your contribution! It appears you created a PR from your master branch, this is [something you should avoid doing](https://jmeridth.com/posts/do-not-issue-pull-requests-from-your-master-branch/), and thus this PR has been automatically closed. \n \n We suggest you follow [our git usage documentation](https://docs.spacestation14.com/en/general-development/setup/git-for-the-ss14-developer.html). \n \n You can move your current work from the master branch to another branch by following [these commands](https://ohshitgit.com/#accidental-commit-master). And then you may recreate your PR using the new branch."
|
||||
|
||||
# If you prefer to just comment on the pr and not close it, uncomment the bellow and comment the above
|
||||
|
||||
# - uses: actions/github-script@v7
|
||||
# with:
|
||||
# script: |
|
||||
# github.rest.issues.createComment({
|
||||
# issue_number: ${{ github.event.number }},
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# body: "Thank you for contributing to the Space Station 14 repository. Unfortunately, it looks like you submitted your pull request from the master branch. We suggest you follow [our git usage documentation](https://docs.spacestation14.com/en/general-development/setup/git-for-the-ss14-developer.html) \n\n You can move your current work from the master branch to another branch by doing `git branch <branch_name` and resetting the master branch. \n\n This pr won't be automatically closed. However, a maintainer may close it for this reason."
|
||||
# })
|
||||
21
.github/workflows/labeler-conflict.yml
vendored
Normal file
21
.github/workflows/labeler-conflict.yml
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
name: Check Merge Conflicts
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
|
||||
jobs:
|
||||
Label:
|
||||
if: ( github.event.pull_request.draft == false ) && ( github.actor != 'PJBot' ) && ( github.actor != 'FrontierATC' ) # Frontier
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check for Merge Conflicts
|
||||
uses: eps1lon/actions-label-merge-conflict@v3.0.0
|
||||
with:
|
||||
dirtyLabel: "S: Merge Conflict"
|
||||
repoToken: "${{ secrets.GITHUB_TOKEN }}"
|
||||
commentOnDirty: "This pull request has conflicts, please resolve those before we can evaluate the pull request."
|
||||
16
.github/workflows/labeler-needsreview.yml
vendored
Normal file
16
.github/workflows/labeler-needsreview.yml
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
name: "Labels: Review"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [review_requested]
|
||||
|
||||
jobs:
|
||||
add_label:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-add-labels@v1
|
||||
with:
|
||||
labels: "S: Needs Review"
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: "S: Awaiting Changes"
|
||||
14
.github/workflows/labeler-pr.yml
vendored
Normal file
14
.github/workflows/labeler-pr.yml
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
name: "Labels: PR"
|
||||
|
||||
on:
|
||||
- pull_request_target
|
||||
|
||||
jobs:
|
||||
labeler:
|
||||
if: github.actor != 'PJBot' && github.actor != 'FrontierATC' # Frontier
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@v5
|
||||
24
.github/workflows/labeler-review.yml
vendored
Normal file
24
.github/workflows/labeler-review.yml
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
name: "Labels: Approved"
|
||||
on:
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
jobs:
|
||||
add_label:
|
||||
# Change the repository name after you've made sure the team name is correct for your fork!
|
||||
# Frontier: repo changed (space-wizards/space-station-14 < new-frontiers-14/frontier-station-14)
|
||||
if: ${{ (github.repository == 'new-frontiers-14/frontier-station-14') && (github.event.review.state == 'APPROVED') }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: tspascoal/get-user-teams-membership@v3
|
||||
id: checkUserMember
|
||||
with:
|
||||
username: ${{ github.actor }}
|
||||
team: "content-maintainers,junior-maintainers"
|
||||
GITHUB_TOKEN: ${{ secrets.LABELER_PAT }}
|
||||
- if: ${{ steps.checkUserMember.outputs.isTeamMember == 'true' }}
|
||||
uses: actions-ecosystem/action-add-labels@v1
|
||||
with:
|
||||
labels: "S: Approved"
|
||||
20
.github/workflows/labeler-size.yml
vendored
Normal file
20
.github/workflows/labeler-size.yml
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
name: "Labels: Size"
|
||||
on: pull_request_target
|
||||
jobs:
|
||||
size-label:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: size-label
|
||||
uses: "pascalgn/size-label-action@v0.5.5"
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
with:
|
||||
# Custom size configuration
|
||||
sizes: >
|
||||
{
|
||||
"0": "XS",
|
||||
"10": "S",
|
||||
"100": "M",
|
||||
"1000": "L",
|
||||
"5000": "XL"
|
||||
}
|
||||
16
.github/workflows/labeler-stable.yml
vendored
Normal file
16
.github/workflows/labeler-stable.yml
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
name: "Labels: Branch stable"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
branches:
|
||||
- 'stable'
|
||||
|
||||
jobs:
|
||||
add_label:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-add-labels@v1
|
||||
with:
|
||||
labels: "Branch: Stable"
|
||||
16
.github/workflows/labeler-staging.yml
vendored
Normal file
16
.github/workflows/labeler-staging.yml
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
name: "Labels: Branch staging"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
branches:
|
||||
- 'staging'
|
||||
|
||||
jobs:
|
||||
add_label:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-add-labels@v1
|
||||
with:
|
||||
labels: "Branch: Staging"
|
||||
16
.github/workflows/labeler-untriaged.yml
vendored
Normal file
16
.github/workflows/labeler-untriaged.yml
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
name: "Labels: Untriaged"
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
pull_request_target:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
add_label:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-add-labels@v1
|
||||
if: join(github.event.issue.labels) == ''
|
||||
with:
|
||||
labels: "S: Untriaged"
|
||||
37
.github/workflows/nf-mapchecker.yml
vendored
Normal file
37
.github/workflows/nf-mapchecker.yml
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
name: Map Prototype Checker
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
paths:
|
||||
# Entity pathspecs - If any of these change (i.e. suffix changes etc), this check should run.
|
||||
- "Resources/Prototypes/Entities/**/*.yml"
|
||||
- "Resources/Prototypes/_NF/Entities/**/*.yml"
|
||||
- "Resources/Prototypes/Nyanotrasen/Entities/**/*.yml"
|
||||
- "Resources/Prototypes/_DV/Entities/**/*.yml"
|
||||
# Map pathspecs - If any maps are changed, this should run.
|
||||
- "Resources/Maps/**/*.yml"
|
||||
# Also the mapchecker itself
|
||||
- ".github/mapchecker/**"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: "3.10"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r .github/mapchecker/requirements.txt
|
||||
- name: Run mapchecker
|
||||
run: |
|
||||
python3 .github/mapchecker/mapchecker.py
|
||||
59
.github/workflows/nf-shipyard-tests.yml
vendored
Normal file
59
.github/workflows/nf-shipyard-tests.yml
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
name: Build & Run Shipyard Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
paths:
|
||||
- "Resources/Maps/_NF/Shuttles/**/*.yml" # Ship grids
|
||||
- "Resources/Prototypes/_NF/Shipyard/**/*.yml" # Shipyard prototypes
|
||||
- "Content.IntegrationTests/Tests/_NF/ShipyardTests.cs" # Shipyard tests
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: github.event.pull_request.draft == false && github.actor != 'FrontierATC'
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Checkout Master
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Setup Submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install Dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Project
|
||||
run: dotnet build --configuration DebugOpt --no-restore /p:WarningsAsErrors=nullable /m
|
||||
|
||||
- name: Run Shipyard Tests
|
||||
shell: pwsh
|
||||
run: |
|
||||
$env:DOTNET_gcServer=1
|
||||
dotnet test --no-build --configuration DebugOpt Content.IntegrationTests/Content.IntegrationTests.csproj --filter FullyQualifiedName~ShipyardTest -- NUnit.ConsoleOut=0 NUnit.MapWarningTo=Failed
|
||||
ci-success:
|
||||
name: Build & Run Shipyard Tests
|
||||
needs:
|
||||
- build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: CI succeeded
|
||||
run: exit 0
|
||||
27
.github/workflows/nf-validate-changelog.yml
vendored
Normal file
27
.github/workflows/nf-validate-changelog.yml
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
name: "Changelog Validator"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, edited, reopened, synchronize]
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }}
|
||||
|
||||
jobs:
|
||||
validate-changelog:
|
||||
name: Changelog validator
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Branch
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18.x
|
||||
|
||||
- name: Validate Changelog
|
||||
run: |
|
||||
cd "Tools/_NF/changelog"
|
||||
node validate_changelog.js
|
||||
shell: bash
|
||||
14
.github/workflows/no-submodule-update.yml
vendored
Normal file
14
.github/workflows/no-submodule-update.yml
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
name: No submodule update checker
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'RobustToolbox'
|
||||
|
||||
jobs:
|
||||
this_aint_right:
|
||||
name: Submodule update in pr found
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Fail
|
||||
run: exit 1
|
||||
24
.github/workflows/publish-changelog.yml
vendored
Normal file
24
.github/workflows/publish-changelog.yml
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
name: Publish Changelog
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 10 * * *'
|
||||
|
||||
jobs:
|
||||
publish_changelog:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{secrets.GITHUB_TOKEN}}
|
||||
ref: master
|
||||
|
||||
- name: Publish changelog
|
||||
run: Tools/actions_changelogs_since_last_run.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.CHANGELOG_DISCORD_WEBHOOK }}
|
||||
continue-on-error: true
|
||||
46
.github/workflows/publish-testing.yml
vendored
Normal file
46
.github/workflows/publish-testing.yml
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
name: Publish Testing
|
||||
|
||||
concurrency:
|
||||
group: publish-testing
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# schedule: # Frontier: disable auto-schedule
|
||||
# - cron: '0 10 * * *'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3.6.0
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v3.2.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Get Engine Tag
|
||||
run: |
|
||||
cd RobustToolbox
|
||||
git fetch --depth=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Packaging
|
||||
run: dotnet build Content.Packaging --configuration Release --no-restore /m
|
||||
|
||||
- name: Package server
|
||||
run: dotnet run --project Content.Packaging server --platform win-x64 --platform linux-x64 --platform osx-x64 --platform linux-arm64
|
||||
|
||||
- name: Package client
|
||||
run: dotnet run --project Content.Packaging client --no-wipe-release
|
||||
|
||||
- name: Publish version
|
||||
run: Tools/publish_multi_request.py --fork-id wizards-testing
|
||||
env:
|
||||
PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }}
|
||||
GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }}
|
||||
63
.github/workflows/publish.yml
vendored
Normal file
63
.github/workflows/publish.yml
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
name: Publish
|
||||
|
||||
concurrency:
|
||||
group: publish
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# Frontier: re-enabled autopublish
|
||||
schedule:
|
||||
- cron: '0 10 * * *'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install -y python3-paramiko python3-lxml
|
||||
|
||||
- uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Get Engine Tag
|
||||
run: |
|
||||
cd RobustToolbox
|
||||
git fetch --depth=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Packaging
|
||||
run: dotnet build Content.Packaging --configuration Release --no-restore /m
|
||||
|
||||
- name: Package server
|
||||
run: dotnet run --project Content.Packaging server --platform win-x64 --platform linux-x64 --platform osx-x64 --platform linux-arm64
|
||||
|
||||
- name: Package client
|
||||
run: dotnet run --project Content.Packaging client --no-wipe-release
|
||||
|
||||
- name: Publish version
|
||||
run: Tools/publish_multi_request.py
|
||||
env:
|
||||
PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }}
|
||||
GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }}
|
||||
|
||||
# - name: Publish changelog (Discord)
|
||||
# continue-on-error: true
|
||||
# run: Tools/actions_changelogs_since_last_run.py
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# DISCORD_WEBHOOK_URL: ${{ secrets.CHANGELOG_DISCORD_WEBHOOK }}
|
||||
|
||||
- name: Publish changelog (RSS)
|
||||
continue-on-error: true
|
||||
run: Tools/actions_changelog_rss.py
|
||||
env:
|
||||
CHANGELOG_RSS_KEY: ${{ secrets.CHANGELOG_RSS_KEY }}
|
||||
69
.github/workflows/rsi-diff.yml
vendored
Normal file
69
.github/workflows/rsi-diff.yml
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
name: Diff RSIs
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '**.rsi/**.png'
|
||||
|
||||
jobs:
|
||||
diff:
|
||||
name: Diff
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Get changed files
|
||||
id: files
|
||||
uses: Ana06/get-changed-files@v2.3.0
|
||||
with:
|
||||
format: 'space-delimited'
|
||||
filter: |
|
||||
**.rsi
|
||||
**.png
|
||||
|
||||
- name: Diff changed RSIs
|
||||
id: diff
|
||||
uses: space-wizards/RSIDiffBot@v1.1
|
||||
with:
|
||||
modified: ${{ steps.files.outputs.modified }}
|
||||
removed: ${{ steps.files.outputs.removed }}
|
||||
added: ${{ steps.files.outputs.added }}
|
||||
basename: ${{ github.event.pull_request.base.repo.full_name }}
|
||||
basesha: ${{ github.event.pull_request.base.sha }}
|
||||
headname: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
headsha: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Potentially find comment
|
||||
uses: peter-evans/find-comment@v1
|
||||
id: fc
|
||||
with:
|
||||
issue-number: ${{ github.event.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: RSI Diff Bot
|
||||
|
||||
- name: Create comment if it doesn't exist
|
||||
if: steps.fc.outputs.comment-id == ''
|
||||
uses: peter-evans/create-or-update-comment@v1
|
||||
with:
|
||||
issue-number: ${{ github.event.number }}
|
||||
body: |
|
||||
${{ steps.diff.outputs.summary-details }}
|
||||
|
||||
- name: Update comment if it exists
|
||||
if: steps.fc.outputs.comment-id != ''
|
||||
uses: peter-evans/create-or-update-comment@v1
|
||||
with:
|
||||
comment-id: ${{ steps.fc.outputs.comment-id }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
${{ steps.diff.outputs.summary-details }}
|
||||
|
||||
- name: Update comment to read that it has been edited
|
||||
if: steps.fc.outputs.comment-id != ''
|
||||
uses: peter-evans/create-or-update-comment@v1
|
||||
with:
|
||||
comment-id: ${{ steps.fc.outputs.comment-id }}
|
||||
edit-mode: append
|
||||
body: |
|
||||
Edit: diff updated after ${{ github.event.pull_request.head.sha }}
|
||||
66
.github/workflows/test-packaging.yml
vendored
Normal file
66
.github/workflows/test-packaging.yml
vendored
Normal file
@ -0,0 +1,66 @@
|
||||
name: Test Packaging
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
paths:
|
||||
- '**.cs'
|
||||
- '**.csproj'
|
||||
- '**.sln'
|
||||
- '**.git**'
|
||||
- '**.yml'
|
||||
# no docs on which one of these is supposed to work, so
|
||||
# why not just do both
|
||||
- 'RobustToolbox'
|
||||
- 'RobustToolbox/**'
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
branches: [ master, staging, stable ]
|
||||
paths:
|
||||
- '**.cs'
|
||||
- '**.csproj'
|
||||
- '**.sln'
|
||||
- '**.git**'
|
||||
- '**.yml'
|
||||
- 'RobustToolbox'
|
||||
- 'RobustToolbox/**'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Test Packaging
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Master
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Setup Submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build Packaging
|
||||
run: dotnet build Content.Packaging --configuration Release --no-restore /m
|
||||
|
||||
- name: Package server
|
||||
run: dotnet run --project Content.Packaging server --platform win-x64 --platform linux-x64 --platform osx-x64 --platform linux-arm64
|
||||
|
||||
- name: Package client
|
||||
run: dotnet run --project Content.Packaging client --no-wipe-release
|
||||
55
.github/workflows/update-credits.yml
vendored
Normal file
55
.github/workflows/update-credits.yml
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
name: Update Contrib and Patreons in credits
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# schedule: # Frontier
|
||||
# - cron: 0 0 * * 0 # Frontier
|
||||
|
||||
jobs:
|
||||
get_credits:
|
||||
runs-on: ubuntu-latest
|
||||
# Hey there fork dev! If you like to include your own contributors in this then you can probably just change this to your own repo
|
||||
# Do this in dump_github_contributors.ps1 too into your own repo
|
||||
if: github.repository == 'new-frontiers-14/frontier-station-14' # Frontier: space-wizards/space-station-14<new-frontiers-14/frontier-station-14
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
ref: master
|
||||
|
||||
- name: Get this week's Contributors
|
||||
shell: pwsh
|
||||
env:
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
run: Tools/dump_github_contributors.ps1 > Resources/Credits/GitHub.txt
|
||||
|
||||
# TODO
|
||||
#- name: Get this week's Patreons
|
||||
# run: Tools/script2dumppatreons > Resources/Credits/Patrons.yml
|
||||
|
||||
# MAKE SURE YOU ENABLED "Allow GitHub Actions to create and approve pull requests" IN YOUR ACTIONS, OTHERWISE IT WILL MOST LIKELY FAIL
|
||||
|
||||
|
||||
# For this you can use a pat token of an account with direct push access to the repo if you have protected branches.
|
||||
# Uncomment this and comment the other line if you do this.
|
||||
# https://github.com/stefanzweifel/git-auto-commit-action#push-to-protected-branches
|
||||
|
||||
#- name: Commit new credit files
|
||||
# uses: stefanzweifel/git-auto-commit-action@v4
|
||||
# with:
|
||||
# commit_message: Update Credits
|
||||
# commit_author: PJBot <pieterjan.briers+bot@gmail.com>
|
||||
|
||||
# Taken from DeltaV, thank you mirrorcult, deltanedas.
|
||||
# This will make a PR
|
||||
- name: Set current date as env variable
|
||||
run: echo "NOW=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_ENV
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
commit-message: Update Credits
|
||||
title: Update Credits
|
||||
body: This is an automated Pull Request. This PR updates the github contributors in the credits section.
|
||||
author: FrontierATC <zuuswa@gmail.com>
|
||||
branch: automated/credits-${{env.NOW}}
|
||||
25
.github/workflows/validate-rgas.yml
vendored
Normal file
25
.github/workflows/validate-rgas.yml
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
name: RGA schema validator
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
yaml-schema-validation:
|
||||
name: YAML RGA schema validator
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false && github.actor != 'FrontierATC' # Frontier
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Setup Submodule
|
||||
run: git submodule update --init
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
- uses: PaulRitter/yaml-schema-validator@v1
|
||||
with:
|
||||
schema: RobustToolbox/Schemas/rga.yml
|
||||
path_pattern: .*attributions.ya?ml$
|
||||
validators_path: RobustToolbox/Schemas/rga_validators.py
|
||||
validators_requirements: RobustToolbox/Schemas/rga_requirements.txt
|
||||
30
.github/workflows/validate-rsis.yml
vendored
Normal file
30
.github/workflows/validate-rsis.yml
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
name: RSI Validator
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
paths:
|
||||
- '**.rsi/**'
|
||||
|
||||
jobs:
|
||||
validate_rsis:
|
||||
name: Validate RSIs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Setup Submodule
|
||||
run: git submodule update --init
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
- name: Set up Python 3.10 # Frontier
|
||||
uses: actions/setup-python@v3 # Frontier
|
||||
with: # Frontier
|
||||
python-version: "3.10" # Frontier
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
pip3 install --ignore-installed --user pillow jsonschema
|
||||
- name: Validate RSIs
|
||||
run: |
|
||||
python3 RobustToolbox/Schemas/validate_rsis.py Resources/
|
||||
25
.github/workflows/validate_mapfiles.yml
vendored
Normal file
25
.github/workflows/validate_mapfiles.yml
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
name: Map file schema validator
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
yaml-schema-validation:
|
||||
name: YAML map schema validator
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false && github.actor != 'FrontierATC' # Frontier
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Setup Submodule
|
||||
run: git submodule update --init
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
- uses: PaulRitter/yaml-schema-validator@v1
|
||||
with:
|
||||
schema: RobustToolbox/Schemas/mapfile.yml
|
||||
path_pattern: .*Resources/Maps/.*
|
||||
validators_path: RobustToolbox/Schemas/mapfile_validators.py
|
||||
validators_requirements: RobustToolbox/Schemas/mapfile_requirements.txt
|
||||
35
.github/workflows/yaml-linter.yml
vendored
Normal file
35
.github/workflows/yaml-linter.yml
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
name: YAML Linter
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, staging, stable ]
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [ opened, reopened, synchronize, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: YAML Linter
|
||||
if: github.actor != 'PJBot' && github.event.pull_request.draft == false && github.actor != 'FrontierATC' # Frontier
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Setup submodule
|
||||
run: |
|
||||
git submodule update --init --recursive
|
||||
- name: Pull engine updates
|
||||
uses: space-wizards/submodule-dependency@v0.1.5
|
||||
- name: Update Engine Submodules
|
||||
run: |
|
||||
cd RobustToolbox/
|
||||
git submodule update --init --recursive
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v4.1.0
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
- name: Build
|
||||
run: dotnet build --configuration Release --no-restore /p:WarningsAsErrors= /m
|
||||
- name: Run Linter
|
||||
run: dotnet run --project Content.YAMLLinter/Content.YAMLLinter.csproj --no-build
|
||||
308
.gitignore
vendored
Normal file
308
.gitignore
vendored
Normal file
@ -0,0 +1,308 @@
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
x64/
|
||||
x86/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
[Ll]og/
|
||||
|
||||
# Visual Studio 2015 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUNIT
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# DNX
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_i.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# JustCode is a .NET coding add-in
|
||||
.JustCode
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# TODO: Comment the next line if you want to checkin your web deploy settings
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
#*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/packages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/packages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/packages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignoreable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
.nuget/
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
node_modules/
|
||||
orleans.codegen.cs
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# JetBrains Rider
|
||||
.idea/
|
||||
*.sln.iml
|
||||
.editorconfig
|
||||
|
||||
# CodeRush
|
||||
.cr/
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Visual Studio Code workspace settings.
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/tasks.json
|
||||
|
||||
# Release package files go here:
|
||||
release/
|
||||
|
||||
# Apple please go.
|
||||
.DS_Store
|
||||
# KDE, come in.
|
||||
.directory
|
||||
|
||||
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/Godot
|
||||
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/GodotSharpTools.dll
|
||||
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/mscorlib.dll
|
||||
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/libmonosgen-2.0.dylib
|
||||
BuildFiles/Windows/Godot/*
|
||||
|
||||
# Working on the tools scripts is getting annoying okay?
|
||||
.mypy_cache/
|
||||
|
||||
# Windows image file caches
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
|
||||
# Merge driver stuff
|
||||
Content.Tools/test/out.yml
|
||||
|
||||
# Windows
|
||||
desktop.ini
|
||||
|
||||
# Images generated using the MapRenderer
|
||||
Resources/MapImages
|
||||
|
||||
## Docfx stuff
|
||||
/Content.Docfx/api/
|
||||
/Content.Docfx/*site
|
||||
|
||||
*.bak
|
||||
|
||||
# Direnv stuff
|
||||
.direnv/
|
||||
4
.gitmodules
vendored
Normal file
4
.gitmodules
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
[submodule "RobustToolbox"]
|
||||
path = RobustToolbox
|
||||
url = https://github.com/space-wizards/RobustToolbox.git
|
||||
branch = master
|
||||
7
.run/Content.Server+Client.run.xml
Normal file
7
.run/Content.Server+Client.run.xml
Normal file
@ -0,0 +1,7 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Content Server+Client" type="CompoundRunConfigurationType">
|
||||
<toRun name="Content.Client" type="DotNetProject" />
|
||||
<toRun name="Content.Server" type="DotNetProject" />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
||||
6
.vscode/extensions.json
vendored
Normal file
6
.vscode/extensions.json
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"ms-dotnettools.csharp",
|
||||
"editorconfig.editorconfig"
|
||||
]
|
||||
}
|
||||
72
.vscode/launch.json
vendored
Normal file
72
.vscode/launch.json
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Client",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/bin/Content.Client/Content.Client.dll",
|
||||
"args": [],
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Client (Compatibility renderer)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/bin/Content.Client/Content.Client.dll",
|
||||
"args": "--cvar display.compat=true",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Server",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/bin/Content.Server/Content.Server.dll",
|
||||
"args": [],
|
||||
"console": "integratedTerminal",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "YAML Linter",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build-yaml-linter",
|
||||
"program": "${workspaceFolder}/bin/Content.YAMLLinter/Content.YAMLLinter.dll",
|
||||
"cwd": "${workspaceFolder}/Content.YAMLLinter",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
}
|
||||
],
|
||||
"compounds": [
|
||||
{
|
||||
"name": "Server/Client",
|
||||
"configurations": [
|
||||
"Server",
|
||||
"Client"
|
||||
],
|
||||
"preLaunchTask": "build"
|
||||
},
|
||||
// Frontier begin
|
||||
{
|
||||
"name": "Server/Client (Release)",
|
||||
"configurations": [
|
||||
"Server",
|
||||
"Client"
|
||||
],
|
||||
"preLaunchTask": "build-release"
|
||||
},
|
||||
{
|
||||
"name": "Server/Client (No Build)",
|
||||
"configurations": [
|
||||
"Server",
|
||||
"Client"
|
||||
]
|
||||
}
|
||||
// Frontier end
|
||||
]
|
||||
}
|
||||
101
.vscode/tasks.json
vendored
Normal file
101
.vscode/tasks.json
vendored
Normal file
@ -0,0 +1,101 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "shell",
|
||||
"args": [
|
||||
"build",
|
||||
"/property:GenerateFullPaths=true", // Ask dotnet build to generate full paths for file names.
|
||||
"/consoleloggerparameters:'ForceNoAlign;NoSummary'" // Do not generate summary otherwise it leads to duplicate errors in Problems panel
|
||||
],
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "silent"
|
||||
},
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
// Frontier begin
|
||||
{
|
||||
"label": "build-release",
|
||||
"command": "dotnet",
|
||||
"type": "shell",
|
||||
"args": [
|
||||
"build",
|
||||
"--configuration=Release", // Build in release mode. Note: --, not /. /configuration doesn't work, because Microsoft.
|
||||
"/property:GenerateFullPaths=true", // Ask dotnet build to generate full paths for file names.
|
||||
"/consoleloggerparameters:NoSummary" // Do not generate summary otherwise it leads to duplicate errors in Problems panel
|
||||
],
|
||||
"group": {
|
||||
"kind": "build"
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "silent"
|
||||
},
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
// Frontier end
|
||||
{
|
||||
"label": "build-yaml-linter",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/Content.YAMLLinter/Content.YAMLLinter.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:'ForceNoAlign;NoSummary'"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "test",
|
||||
"command": "dotnet",
|
||||
"type": "shell",
|
||||
"args": [
|
||||
"test",
|
||||
"--no-build",
|
||||
"--configuration",
|
||||
"DebugOpt",
|
||||
"Content.Tests/Content.Tests.csproj",
|
||||
"--",
|
||||
"NUnit.ConsoleOut=0"
|
||||
],
|
||||
"group": {
|
||||
"kind": "test"
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "silent"
|
||||
},
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "integration-test",
|
||||
"command": "dotnet",
|
||||
"type": "shell",
|
||||
"args": [
|
||||
"test",
|
||||
"--no-build",
|
||||
"--configuration",
|
||||
"DebugOpt",
|
||||
"Content.IntegrationTests/Content.IntegrationTests.csproj",
|
||||
"--",
|
||||
"NUnit.ConsoleOut=0",
|
||||
"NUnit.MapWarningTo=Failed.ConsoleOut=0",
|
||||
"NUnit.MapWarningTo=Failed"
|
||||
],
|
||||
"group": {
|
||||
"kind": "test"
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "silent"
|
||||
},
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
]
|
||||
}
|
||||
23
.woodpecker/build.yml
Normal file
23
.woodpecker/build.yml
Normal file
@ -0,0 +1,23 @@
|
||||
when:
|
||||
- event: [ manual ]
|
||||
|
||||
steps:
|
||||
- name: build Content.Packaging
|
||||
image: "localhost/dotnet-sdk:9.0.201"
|
||||
commands:
|
||||
- dotnet build Content.Packaging --configuration Release --property WarningLevel=0
|
||||
|
||||
- name: run Content.Packaging
|
||||
image: "localhost/dotnet-sdk:9.0.201"
|
||||
commands:
|
||||
- dotnet run --project Content.Packaging server --platform linux-x64 --hybrid-acz --property WarningLevel=0
|
||||
|
||||
- name: storage Content.Server
|
||||
image: "debian:bookworm"
|
||||
volumes:
|
||||
- "/var/lib/star-horizon/woodpecker/storage:/storage"
|
||||
commands:
|
||||
- bash -c 'if [[ ! -e /storage/ss14/${CI_COMMIT_BRANCH} ]]; then mkdir -p /storage/ss14/${CI_COMMIT_BRANCH}; fi'
|
||||
- mv release/SS14.Server_linux-x64.zip /storage/ss14/${CI_COMMIT_BRANCH}/
|
||||
- cp Resources/ConfigPresets/StarHorizon/server_config.toml /storage/ss14/${CI_COMMIT_BRANCH}/
|
||||
- echo '${CI_COMMIT_SHA}' > /storage/ss14/${CI_COMMIT_BRANCH}/build_commit.txt
|
||||
5
BuildChecker/.gitignore
vendored
Normal file
5
BuildChecker/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
INSTALLED_HOOKS_VERSION
|
||||
DISABLE_SUBMODULE_AUTOUPDATE
|
||||
*.nuget*
|
||||
project.assets.json
|
||||
project.packagespec.json
|
||||
52
BuildChecker/BuildChecker.csproj
Normal file
52
BuildChecker/BuildChecker.csproj
Normal file
@ -0,0 +1,52 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
This is a dummy .csproj file to check things like submodules.
|
||||
Better this than other errors.
|
||||
|
||||
If you want to create this kind of file yourself, you have to create an empty .NET application,
|
||||
Then strip it of everything until you have the <Project> tags.
|
||||
VS refuses to load the project if you make a bare project file and use Add -> Existing Project... for some reason.
|
||||
|
||||
You want to handle the Build, Clean and Rebuild tasks to prevent missing task errors on build.
|
||||
|
||||
If you want to learn more about these kinds of things, check out Microsoft's official documentation about MSBuild:
|
||||
https://docs.microsoft.com/en-us/visualstudio/msbuild/msbuild
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<Python>python3</Python>
|
||||
<Python Condition="'$(OS)'=='Windows_NT' Or '$(OS)'=='Windows'">py -3</Python>
|
||||
<ProjectGuid>{C899FCA4-7037-4E49-ABC2-44DE72487110}</ProjectGuid>
|
||||
<TargetFramework>net4.7.2</TargetFramework>
|
||||
<RestorePackages>false</RestorePackages>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<OutputType>Library</OutputType>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<StartupObject />
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||
<OutputPath>bin\Debug\</OutputPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
|
||||
<OutputPath>bin\Release\</OutputPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Tools|AnyCPU' ">
|
||||
<OutputPath>bin\Tools\</OutputPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'DebugOpt|AnyCPU' ">
|
||||
<OutputPath>bin\DebugOpt\</OutputPath>
|
||||
</PropertyGroup>
|
||||
<Target Name="Build">
|
||||
<Exec Command="$(Python) git_helper.py" CustomErrorRegularExpression="^Error" />
|
||||
</Target>
|
||||
<Target Name="Rebuild" DependsOnTargets="Build" />
|
||||
<Target Name="Clean">
|
||||
<Message Importance="low" Text="Ignoring 'Clean' target." />
|
||||
</Target>
|
||||
<Target Name="Compile">
|
||||
</Target>
|
||||
<Target Name="CoreCompile">
|
||||
</Target>
|
||||
</Project>
|
||||
125
BuildChecker/git_helper.py
Normal file
125
BuildChecker/git_helper.py
Normal file
@ -0,0 +1,125 @@
|
||||
#!/usr/bin/env python3
|
||||
# Installs git hooks, updates them, updates submodules, that kind of thing.
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
SOLUTION_PATH = Path("..") / "SpaceStation14.sln"
|
||||
# If this doesn't match the saved version we overwrite them all.
|
||||
CURRENT_HOOKS_VERSION = "2"
|
||||
QUIET = len(sys.argv) == 2 and sys.argv[1] == "--quiet"
|
||||
|
||||
|
||||
def run_command(command: List[str], capture: bool = False) -> subprocess.CompletedProcess:
|
||||
"""
|
||||
Runs a command with pretty output.
|
||||
"""
|
||||
text = ' '.join(command)
|
||||
if not QUIET:
|
||||
print("$ {}".format(text))
|
||||
|
||||
sys.stdout.flush()
|
||||
|
||||
completed = None
|
||||
|
||||
if capture:
|
||||
completed = subprocess.run(command, cwd="..", stdout=subprocess.PIPE)
|
||||
else:
|
||||
completed = subprocess.run(command, cwd="..")
|
||||
|
||||
if completed.returncode != 0:
|
||||
print("Error: command exited with code {}!".format(completed.returncode))
|
||||
|
||||
return completed
|
||||
|
||||
|
||||
def update_submodules():
|
||||
"""
|
||||
Updates all submodules.
|
||||
"""
|
||||
|
||||
if ('GITHUB_ACTIONS' in os.environ):
|
||||
return
|
||||
|
||||
if os.path.isfile("DISABLE_SUBMODULE_AUTOUPDATE"):
|
||||
return
|
||||
|
||||
if shutil.which("git") is None:
|
||||
raise FileNotFoundError("git not found in PATH")
|
||||
|
||||
# If the status doesn't match, force VS to reload the solution.
|
||||
# status = run_command(["git", "submodule", "status"], capture=True)
|
||||
run_command(["git", "submodule", "update", "--init", "--recursive"])
|
||||
# status2 = run_command(["git", "submodule", "status"], capture=True)
|
||||
|
||||
# Something changed.
|
||||
# if status.stdout != status2.stdout:
|
||||
# print("Git submodules changed. Reloading solution.")
|
||||
# reset_solution()
|
||||
|
||||
|
||||
def install_hooks():
|
||||
"""
|
||||
Installs the necessary git hooks into .git/hooks.
|
||||
"""
|
||||
|
||||
# Read version file.
|
||||
if os.path.isfile("INSTALLED_HOOKS_VERSION"):
|
||||
with open("INSTALLED_HOOKS_VERSION", "r") as f:
|
||||
if f.read() == CURRENT_HOOKS_VERSION:
|
||||
if not QUIET:
|
||||
print("No hooks change detected.")
|
||||
return
|
||||
|
||||
with open("INSTALLED_HOOKS_VERSION", "w") as f:
|
||||
f.write(CURRENT_HOOKS_VERSION)
|
||||
|
||||
print("Hooks need updating.")
|
||||
|
||||
hooks_target_dir = Path("..")/".git"/"hooks"
|
||||
hooks_source_dir = Path("hooks")
|
||||
|
||||
# Clear entire tree since we need to kill deleted files too.
|
||||
for filename in os.listdir(str(hooks_target_dir)):
|
||||
os.remove(str(hooks_target_dir/filename))
|
||||
|
||||
for filename in os.listdir(str(hooks_source_dir)):
|
||||
print("Copying hook {}".format(filename))
|
||||
shutil.copy2(str(hooks_source_dir/filename),
|
||||
str(hooks_target_dir/filename))
|
||||
|
||||
|
||||
def reset_solution():
|
||||
"""
|
||||
Force VS to think the solution has been changed to prompt the user to reload it, thus fixing any load errors.
|
||||
"""
|
||||
|
||||
with SOLUTION_PATH.open("r") as f:
|
||||
content = f.read()
|
||||
|
||||
with SOLUTION_PATH.open("w") as f:
|
||||
f.write(content)
|
||||
|
||||
def check_for_zip_download():
|
||||
# Check if .git exists,
|
||||
cur_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
if not os.path.isdir(os.path.join(cur_dir, ".git")):
|
||||
print("It appears that you downloaded this repository directly from GitHub. (Using the .zip download option) \n"
|
||||
"When downloading straight from GitHub, it leaves out important information that git needs to function. "
|
||||
"Such as information to download the engine or even the ability to even be able to create contributions. \n"
|
||||
"Please read and follow https://docs.spacestation14.com/en/general-development/setup/setting-up-a-development-environment.html \n"
|
||||
"If you just want a Sandbox Server, you are following the wrong guide! You can download a premade server following the instructions here:"
|
||||
"https://docs.spacestation14.com/en/general-development/setup/server-hosting-tutorial.html \n"
|
||||
"Closing automatically in 30 seconds.")
|
||||
time.sleep(30)
|
||||
exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
check_for_zip_download()
|
||||
install_hooks()
|
||||
update_submodules()
|
||||
13
BuildChecker/hooks/post-checkout
Executable file
13
BuildChecker/hooks/post-checkout
Executable file
@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
|
||||
gitroot=`git rev-parse --show-toplevel`
|
||||
|
||||
cd "$gitroot/BuildChecker"
|
||||
|
||||
if [[ `uname` == MINGW* || `uname` == CYGWIN* ]]; then
|
||||
# Windows
|
||||
py -3 git_helper.py --quiet
|
||||
else
|
||||
# Not Windows, so probably some other Unix thing.
|
||||
python3 git_helper.py --quiet
|
||||
fi
|
||||
5
BuildChecker/hooks/post-merge
Executable file
5
BuildChecker/hooks/post-merge
Executable file
@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Just call post-checkout since it does the same thing.
|
||||
gitroot=`git rev-parse --show-toplevel`
|
||||
bash "$gitroot/.git/hooks/post-checkout"
|
||||
20
BuildFiles/Mac/Space Station 14.app/Contents/Info.plist
Normal file
20
BuildFiles/Mac/Space Station 14.app/Contents/Info.plist
Normal file
@ -0,0 +1,20 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundleName</key>
|
||||
<string>SS14</string>
|
||||
<key>CFBundleDisplayName</key>
|
||||
<string>Space Station 14</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>SS14</string>
|
||||
<!--
|
||||
Just a note about this icon.
|
||||
MacOS seems REALLY iffy about this and even when the file is correct,
|
||||
it can take forever before it decides to actually update it and display it.
|
||||
TL;DR Apple is stupid.
|
||||
-->
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>ss14</string>
|
||||
</dict>
|
||||
</plist>
|
||||
8
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/SS14
Executable file
8
BuildFiles/Mac/Space Station 14.app/Contents/MacOS/SS14
Executable file
@ -0,0 +1,8 @@
|
||||
#!/bin/sh
|
||||
|
||||
# cd to file containing script or something?
|
||||
BASEDIR=$(dirname "$0")
|
||||
echo "$BASEDIR"
|
||||
cd "$BASEDIR"
|
||||
|
||||
exec ../Resources/Robust.Client "$@"
|
||||
BIN
BuildFiles/Mac/Space Station 14.app/Contents/Resources/ss14.icns
Normal file
BIN
BuildFiles/Mac/Space Station 14.app/Contents/Resources/ss14.icns
Normal file
Binary file not shown.
40
CODE_OF_CONDUCT.md
Normal file
40
CODE_OF_CONDUCT.md
Normal file
@ -0,0 +1,40 @@
|
||||
# Frontier Station Code of Conduct
|
||||
|
||||
Frontier Station's staff and community is made up volunteers from all over the world, working on every aspect of the project - including development, teaching, and hosting integral tools.
|
||||
|
||||
Diversity is one of our huge strengths, but it can also lead to communication issues and unhappiness. To that end, we have a few ground rules that we ask people to adhere to. This code applies equally to all levels of the project, from commenters to contributors to staff.
|
||||
|
||||
This isn’t an exhaustive list of things that you can’t do. Rather, take it in the spirit in which it’s intended - a guide to make it easier to enrich all of us and the technical communities in which we participate.
|
||||
|
||||
This code of conduct applies specifically to the Github repositories, discord community, and other spaces managed by the Frontier Station project or Frontier Station Staff. Some spaces, such as the Frontier Station Discord or the official Frontier Station game servers, have additional rules but are in spirit equal to what may be found in here.
|
||||
|
||||
If you believe someone is violating the code of conduct, we ask that you report it by contacting a Maintainer or staff member through [Discord](https://discord.gg/frontier), or emailing [zuus87@gmail.com](mailto:zuus87@gmail.com).
|
||||
|
||||
- **Be friendly and patient.**
|
||||
- **Be welcoming.** We strive to be a community that welcomes and supports people of all backgrounds and identities. This includes, but is not limited to members of any race, ethnicity, culture, national origin, colour, immigration status, social and economic class, educational level, sex, sexual orientation, gender identity and expression, age, size, family status, political belief, religion, and mental and physical ability.
|
||||
- **Be considerate.** Your work will be used by other people, and you in turn will depend on the work of others. Any decision you take will affect users and contributors, and you should take those consequences into account when making decisions. Remember that we're a world-wide community, so you might not be communicating in someone else's primary language. We have contributors of all skill levels, some even making their first foray into a new field with this project, so keep that in mind when discussing someone's work.
|
||||
- **Be respectful.** Not all of us will agree all the time, but disagreement is no excuse for poor behavior and poor manners. We might all experience some frustration now and then, but we cannot allow that frustration to turn into a personal attack. It’s important to remember that a community where people feel uncomfortable or threatened is not a productive one. Members of the Frontier Station community should be respectful when dealing with other members as well as with people outside the Frontier Station community. Assume contributions to the project, even those that do not end up being included, are made in good faith.
|
||||
- **Be careful in the words that you choose.** We are a community of professionals, and we conduct ourselves professionally. Be kind to others. Do not insult or put down other participants. Harassment and other exclusionary behavior aren't acceptable. This includes, but is not limited to:
|
||||
- Violent threats or language directed against another person.
|
||||
- Discriminatory jokes and language.
|
||||
- Posting sexually explicit or violent material.
|
||||
- Posting (or threatening to post) other people's personally identifying information ("doxing").
|
||||
- Personal insults, especially those using racist or sexist terms.
|
||||
- Unwelcome sexual attention.
|
||||
- Advocating for, or encouraging, any of the above behavior.
|
||||
- Repeated harassment of others. In general, if someone asks you to stop, then stop.
|
||||
- **When we disagree, try to understand why.** Disagreements, both social and technical, happen all the time and Frontier Station is no exception. It is important that we resolve disagreements and differing views constructively. Remember that we’re different. The strength of Frontier Station comes from its varied community, people from a wide range of backgrounds. Different people have different perspectives on issues. Being unable to understand why someone holds a viewpoint doesn’t mean that they’re wrong. Don’t forget that it is human to make mistakes and blaming each other doesn’t get us anywhere. Instead, focus on helping to resolve issues and learning from mistakes.
|
||||
|
||||
Original text courtesy of the [Speak Up! project](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html).
|
||||
|
||||
## On Comunity Moderation
|
||||
|
||||
Deviating from the Code of Conduct on the Github repository may result in moderative actions taken by project Maintainers. This can involve your content being edited or deleted, and may result in a temporary or permanent block from the repository.
|
||||
|
||||
This is to ensure Frontier Station is a healthy community in which contributors feel encouraged and empowered to contribute, and to give you as a member of this community a chance to reflect on how you are interacting with it. While outright offensive and bigoted content will *always* be unacceptable on the repository, Maintainers are at liberty to take moderative actions against more ambiguous content that fail to provide constructive criticism, or that provides constructive criticism in a non-constructive manner. Examples of this include using hyperbole, bringing up PRs/changes unrelated to the discussion at hand, hostile tone, off-topic comments, creating PRs/Issues for the sole purpose of causing discussions, skirting the line of acceptable behavior, etc. Disagreeing with content or each other is fine and appreciated, but only as long as it's done with respect and in a constructive manner.
|
||||
|
||||
Maintainers are expected to adhere to the guidelines as listed in the [Github Moderation Guidelines](https://docs.spacestation14.com/en/general-development/github-moderation-guidelines.html), though may deviate should they feel it's in the best interest of the community. If you believe you had an action incorrectly applied against you, you are encouraged to contact staff via [Discord](https://discord.gg/frontier).
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is an edited version of the [Space Station 14 Code of Conduct](https://github.com/space-wizards/space-station-14/blob/master/CODE_OF_CONDUCT.md), which is an edited version of the [Django Code of Conduct](https://www.djangoproject.com/conduct/), licensed under CC BY 3.0, for the Frontier Station Github repository.
|
||||
130
CONTRIBUTING.md
Normal file
130
CONTRIBUTING.md
Normal file
@ -0,0 +1,130 @@
|
||||
# Contributing to Frontier Station
|
||||
|
||||
If you're considering contributing to Frontier Station, [Wizard's Den's PR guidelines](https://docs.spacestation14.com/en/general-development/codebase-info/pull-request-guidelines.html) are a good starting point for code quality and version tracking etiquette. Note that we do not have the same master/stable branch distinction.
|
||||
|
||||
Importantly, do not make webedits. From the text above:
|
||||
> Do not use GitHub's web editor to create PRs. PRs submitted through the web editor may be closed without review.
|
||||
|
||||
"Upstream" refers to the [space-wizards/space-station-14](https://github.com/space-wizards/space-station-14) repository that this fork was created from.
|
||||
|
||||
# Frontier-specific content
|
||||
|
||||
In general, anything you create from scratch (vs. modifying something that exists from upstream) should go in a Frontier-specific subfolder, `_NF`.
|
||||
|
||||
Examples:
|
||||
- `Content.Server/_NF/Shipyard/Systems/ShipyardSystem.cs`
|
||||
- `Resources/Prototypes/_NF/Loadouts/role_loadouts.yml`
|
||||
- `Resources/Audio/_NF/Voice/Goblin/goblin-scream-03.ogg`
|
||||
- `Resources/Textures/_NF/Tips/clippy.rsi/left.png`
|
||||
- `Resources/Locale/en-US/_NF/devices/pda.ftl`
|
||||
- `Resources/ServerInfo/_NF/Guidebook/Medical/Doc.xml`
|
||||
|
||||
# Changes to upstream files
|
||||
|
||||
If you make a change to an upstream C# or YAML file, **you must add comments on or around the changed lines**.
|
||||
The comments should clarify what changed, to make conflict resolution simpler when a file is changed upstream.
|
||||
If you make changes to values, to be consistent, leave a comment in the form `Frontier: OLD<NEW`.
|
||||
|
||||
For YAML specifically, if you add a component or add a list of contiguous fields, use block comments, but if you make limited edits to a component's fields, comment the fields individually.
|
||||
|
||||
For C# files, if you are adding a lot of code, consider using a partial class when it makes sense.
|
||||
|
||||
If cherry-picking upstream features, it is best to comment with the PR number that was cherry-picked.
|
||||
|
||||
As an aside, fluent (.ftl) files **do not support comments on the same line** as a locale value - leave a comment on the line above if modifying values.
|
||||
|
||||
## Examples of comments in upstream or ported files
|
||||
|
||||
A single line comment on a changed yml field:
|
||||
```yml
|
||||
- type: entity
|
||||
id: TorsoHarpy
|
||||
name: "harpy torso"
|
||||
parent: [PartHarpy, BaseTorso] # Frontier: add BaseTorso
|
||||
```
|
||||
|
||||
A change to a value (note: `OLD<NEW`)
|
||||
```yml
|
||||
- type: Gun
|
||||
fireRate: 4 # Frontier: 3<4
|
||||
availableModes:
|
||||
- SemiAuto
|
||||
```
|
||||
|
||||
A cyborg module with an added moduleId field (inline blank comment), a commented out bucket (inline blank comment), and a DroppableBorgModule that we've added (begin/end block comment).
|
||||
```yml
|
||||
- type: ItemBorgModule
|
||||
moduleId: Gardening # Frontier
|
||||
items:
|
||||
- HydroponicsToolMiniHoe
|
||||
- HydroponicsToolSpade
|
||||
- HydroponicsToolClippers
|
||||
# - Bucket # Frontier
|
||||
# Frontier: droppable borg items
|
||||
- type: DroppableBorgModule
|
||||
moduleId: Gardening
|
||||
items:
|
||||
- id: Bucket
|
||||
whitelist:
|
||||
tags:
|
||||
- Bucket
|
||||
# End Frontier
|
||||
```
|
||||
|
||||
A comment on a new imported namespace:
|
||||
```cs
|
||||
using Content.Client._NF.Emp.Overlays; // Frontier
|
||||
```
|
||||
|
||||
A pair of comments enclosing a block of added code:
|
||||
```cs
|
||||
component.Capacity = state.Capacity;
|
||||
|
||||
component.UIUpdateNeeded = true;
|
||||
|
||||
// Frontier: ensure signature colour is consistent
|
||||
if (TryComp<StampComponent>(uid, out var stamp))
|
||||
{
|
||||
stamp.StampedColor = state.Color;
|
||||
}
|
||||
// End Frontier
|
||||
```
|
||||
|
||||
An edit to a Delta-V locale file, note the `OLD<NEW` format and the separate line for the comment.
|
||||
```fluent
|
||||
# Frontier: "Job Whitelists"<"Role Whitelists"
|
||||
player-panel-job-whitelists = Role Whitelists
|
||||
```
|
||||
|
||||
# Mapping
|
||||
|
||||
For ship submissions, refer to the [Ship Submission Guidelines](https://frontierstation.wiki.gg/wiki/Ship_Submission_Guidelines) on the Frontier wiki.
|
||||
|
||||
In general:
|
||||
|
||||
Frontier uses specific prototypes for points of interest and ship maps (e.g. to store spawn information, station spawn data, or ship price and categories). For ships, these are stored in the VesselPrototype (Resources/Prototypes/_NF/Shipyard) or PointOfInterestPrototype (Resources/Prototypes/_NF/PointsOfInterest). If creating a new ship or POI, refer to existing prototypes.
|
||||
|
||||
If you are making changes to a map, check with the map's maintainer (or if none, its author), and avoid having multiple open features with changes to the same map.
|
||||
|
||||
Conflicts with maps make PRs mutually exclusive, so either your work or the maintainer's work will be lost — communicate to avoid this!
|
||||
|
||||
# Before you submit
|
||||
|
||||
Double-check your diff on GitHub before submitting: look for unintended commits or changes and remove accidental whitespace or line-ending changes.
|
||||
|
||||
Additionally, for PRs that've been open for a long time, if you see `RobustToolbox` in the changed files, you have to revert it. Use `git checkout upstream/master RobustToolbox` (replacing `upstream` with the name of your new-frontiers-14/frontier-station-14 remote)
|
||||
|
||||
# Changelogs
|
||||
|
||||
Currently, all changelogs go to the Frontier changelog. The ADMIN: prefix does nothing at the moment.
|
||||
|
||||
# Additional resources
|
||||
|
||||
If you are new to contributing to SS14 in general, have a look at the [SS14 docs](https://docs.spacestation14.io/) or ask for help in `#dev-help` on [Discord](https://discord.gg/tpuAT7d3zm/)!
|
||||
|
||||
## AI-Generated Content
|
||||
You may use AI tools to assist with code, but any AI-generated code must be thoroughly tested and audited before submission. Submitting untested or unaudited AI-generated code is not allowed.
|
||||
|
||||
AI-generated sprites and art are not allowed to be submitted to the repository.
|
||||
|
||||
Trying to PR untested/unaudited AI-generated code or any AI-generated art may result in you being banned from contributing.
|
||||
43
COPYRIGHT-AGPLv3.txt
Normal file
43
COPYRIGHT-AGPLv3.txt
Normal file
@ -0,0 +1,43 @@
|
||||
Frontier Station
|
||||
|
||||
Copyright (C) New Frontiers 2023-2025
|
||||
|
||||
Frontier Station uses code from the following AGPLv3-licensed projects:
|
||||
|
||||
Einstein Engines
|
||||
|
||||
Copyright (C) SimpleStation14 2024-2025
|
||||
|
||||
Corvax Frontier
|
||||
|
||||
Copyright (C) Corvax 2021-2025
|
||||
|
||||
Delta-V
|
||||
|
||||
Copyright (C) Delta V 2023-2025
|
||||
|
||||
Estacao Pirata
|
||||
|
||||
Copyright (C) estação-pirata 2024-2025
|
||||
|
||||
Goob-Station
|
||||
|
||||
Copyright (C) Goob Station 2024-2025
|
||||
|
||||
Impstation
|
||||
|
||||
Copyright (C) Impstation 2024-2025
|
||||
|
||||
Nuclear 14
|
||||
|
||||
Copyright (C) Vault-Overseers 2022-2025
|
||||
|
||||
Monolith
|
||||
|
||||
Copyright (C) Monolith 2025
|
||||
|
||||
White Dream
|
||||
|
||||
Copyright (C) White Dream Project 2024-2025
|
||||
|
||||
For license terms, refer to LEGAL.md.
|
||||
168
Content.Benchmarks/ColorInterpolateBenchmark.cs
Normal file
168
Content.Benchmarks/ColorInterpolateBenchmark.cs
Normal file
@ -0,0 +1,168 @@
|
||||
#if NETCOREAPP
|
||||
using System.Runtime.Intrinsics;
|
||||
using System.Runtime.Intrinsics.X86;
|
||||
#endif
|
||||
using System;
|
||||
using System.Runtime.CompilerServices;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Maths;
|
||||
using Robust.Shared.Random;
|
||||
using SysVector4 = System.Numerics.Vector4;
|
||||
|
||||
namespace Content.Benchmarks
{
    /// <summary>
    /// Benchmarks several strategies for linearly interpolating between two colors:
    /// naive per-channel math, <c>System.Numerics.Vector4.Lerp</c> (by value and by
    /// readonly reference), and hand-written SSE/FMA intrinsics (NETCOREAPP only).
    /// </summary>
    [DisassemblyDiagnoser]
    [Virtual]
    public class ColorInterpolateBenchmark
    {
#if NETCOREAPP
        private const MethodImplOptions AggressiveOpt = MethodImplOptions.AggressiveOptimization;
#else
        private const MethodImplOptions AggressiveOpt = default;
#endif

        // Input pairs to interpolate and a parallel output buffer, filled in Setup().
        private (Color, Color)[] _colors;
        private Color[] _output;

        [Params(100)] public int N { get; set; }

        [GlobalSetup]
        public void Setup()
        {
            // Fixed seed: every run interpolates the same color pairs.
            var random = new Random(3005);

            _colors = new (Color, Color)[N];
            _output = new Color[N];

            for (var i = 0; i < N; i++)
            {
                var r1 = random.NextFloat();
                var g1 = random.NextFloat();
                var b1 = random.NextFloat();
                var a1 = random.NextFloat();

                var r2 = random.NextFloat();
                var g2 = random.NextFloat();
                var b2 = random.NextFloat();
                var a2 = random.NextFloat();

                _colors[i] = (new Color(r1, g1, b1, a1), new Color(r2, g2, b2, a2));
            }
        }

        [Benchmark]
        public void BenchSimple()
        {
            for (var i = 0; i < N; i++)
            {
                ref var tuple = ref _colors[i];
                _output[i] = InterpolateSimple(tuple.Item1, tuple.Item2, 0.5f);
            }
        }

        [Benchmark]
        public void BenchSysVector4In()
        {
            for (var i = 0; i < N; i++)
            {
                ref var tuple = ref _colors[i];
                _output[i] = InterpolateSysVector4In(tuple.Item1, tuple.Item2, 0.5f);
            }
        }

        [Benchmark]
        public void BenchSysVector4()
        {
            for (var i = 0; i < N; i++)
            {
                ref var tuple = ref _colors[i];
                _output[i] = InterpolateSysVector4(tuple.Item1, tuple.Item2, 0.5f);
            }
        }

#if NETCOREAPP
        [Benchmark]
        public void BenchSimd()
        {
            for (var i = 0; i < N; i++)
            {
                ref var tuple = ref _colors[i];
                _output[i] = InterpolateSimd(tuple.Item1, tuple.Item2, 0.5f);
            }
        }

        [Benchmark]
        public void BenchSimdIn()
        {
            for (var i = 0; i < N; i++)
            {
                ref var tuple = ref _colors[i];
                _output[i] = InterpolateSimdIn(tuple.Item1, tuple.Item2, 0.5f);
            }
        }
#endif

        /// <summary>
        /// Per-channel scalar lerp: a + (b - a) * lambda for each of R, G, B, A.
        /// </summary>
        [MethodImpl(AggressiveOpt)]
        public static Color InterpolateSimple(Color a, Color b, float lambda)
        {
            return new(
                a.R + (b.R - a.R) * lambda,
                a.G + (b.G - a.G) * lambda,
                a.B + (b.B - a.B) * lambda, // Fix: was "b.G - a.B", a copy-paste bug that corrupted the blue channel
                a.A + (b.A - a.A) * lambda
            );
        }

        /// <summary>
        /// Lerp via <see cref="SysVector4.Lerp"/>, reinterpreting Color as Vector4 by value.
        /// </summary>
        [MethodImpl(AggressiveOpt)]
        public static Color InterpolateSysVector4(Color a, Color b,
            float lambda)
        {
            ref var sva = ref Unsafe.As<Color, SysVector4>(ref a);
            ref var svb = ref Unsafe.As<Color, SysVector4>(ref b);

            var res = SysVector4.Lerp(sva, svb, lambda);

            return Unsafe.As<SysVector4, Color>(ref res);
        }

        /// <summary>
        /// Same as <see cref="InterpolateSysVector4"/> but takes its endpoints by readonly
        /// reference, measuring the cost of the extra indirection.
        /// </summary>
        [MethodImpl(AggressiveOpt)]
        public static Color InterpolateSysVector4In(in Color endPoint1, in Color endPoint2,
            float lambda)
        {
            ref var sva = ref Unsafe.As<Color, SysVector4>(ref Unsafe.AsRef(in endPoint1));
            ref var svb = ref Unsafe.As<Color, SysVector4>(ref Unsafe.AsRef(in endPoint2));

            // Fix: endpoints were passed as (svb, sva), reversing the interpolation
            // direction relative to InterpolateSysVector4 for any lambda != 0.5.
            var res = SysVector4.Lerp(sva, svb, lambda);

            return Unsafe.As<SysVector4, Color>(ref res);
        }

#if NETCOREAPP
        /// <summary>
        /// Hand-rolled lerp using SSE subtract + FMA multiply-add.
        /// NOTE(review): assumes the host CPU supports FMA — no Fma.IsSupported guard; confirm
        /// this benchmark only runs on FMA-capable hardware.
        /// </summary>
        [MethodImpl(AggressiveOpt)]
        public static Color InterpolateSimd(Color a, Color b,
            float lambda)
        {
            var vecA = Unsafe.As<Color, Vector128<float>>(ref a);
            var vecB = Unsafe.As<Color, Vector128<float>>(ref b);

            // a + (b - a) * lambda, fused into a single multiply-add.
            vecB = Fma.MultiplyAdd(Sse.Subtract(vecB, vecA), Vector128.Create(lambda), vecA);

            return Unsafe.As<Vector128<float>, Color>(ref vecB);
        }

        /// <summary>
        /// Same as <see cref="InterpolateSimd"/> but with by-reference endpoints.
        /// </summary>
        [MethodImpl(AggressiveOpt)]
        public static Color InterpolateSimdIn(in Color a, in Color b,
            float lambda)
        {
            var vecA = Unsafe.As<Color, Vector128<float>>(ref Unsafe.AsRef(in a));
            var vecB = Unsafe.As<Color, Vector128<float>>(ref Unsafe.AsRef(in b));

            vecB = Fma.MultiplyAdd(Sse.Subtract(vecB, vecA), Vector128.Create(lambda), vecA);

            return Unsafe.As<Vector128<float>, Color>(ref vecB);
        }
#endif
    }
}
|
||||
259
Content.Benchmarks/ComponentFetchBenchmark.cs
Normal file
259
Content.Benchmarks/ComponentFetchBenchmark.cs
Normal file
@ -0,0 +1,259 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Utility;
|
||||
|
||||
namespace Content.Benchmarks
{
    /// <summary>
    /// Compares two component-storage layouts: one dictionary per component type
    /// ("Part") versus a single flat dictionary keyed on (entity, type) ("Flat"),
    /// across needle-in-haystack lookups, plain iteration, and iterate-then-refetch.
    /// </summary>
    [SimpleJob]
    [Virtual]
    public class ComponentFetchBenchmark
    {
        [Params(5000)] public int NEnt { get; set; }

        private readonly Dictionary<(EntityUid, Type), BComponent>
            _componentsFlat = new();

        private readonly Dictionary<Type, Dictionary<EntityUid, BComponent>> _componentsPart =
            new();

        private UniqueIndex<Type, BComponent> _allComponents = new();

        // Entities probed by the Find* benchmarks (intended to be ~1% of all entities).
        private readonly List<EntityUid> _lookupEntities = new();

        [GlobalSetup]
        public void Setup()
        {
            var random = new Random();

            _componentsPart[typeof(BComponent1)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent2)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent3)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent4)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponentLookup)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent6)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent7)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent8)] = new Dictionary<EntityUid, BComponent>();
            _componentsPart[typeof(BComponent9)] = new Dictionary<EntityUid, BComponent>();

            for (var i = 0u; i < NEnt; i++)
            {
                var eId = new EntityUid(i);

                // Fix: this was "random.Next(1) == 0", which always returns 0 and so put
                // *every* entity in the lookup set — contradicting the "on 1% of entities"
                // scenario documented on the Find* benchmarks below.
                if (random.Next(100) == 0)
                {
                    _lookupEntities.Add(eId);
                }

                var comps = new List<BComponent>
                {
                    new BComponent1(),
                    new BComponent2(),
                    new BComponent3(),
                    new BComponent4(),
                    new BComponent6(),
                    new BComponent7(),
                    new BComponent8(),
                    new BComponent9(),
                };

                // 0.1% of entities get the rare component that the Find* benchmarks search for.
                if (random.Next(1000) == 0)
                {
                    comps.Add(new BComponentLookup());
                }

                foreach (var comp in comps)
                {
                    comp.Uid = eId;
                    var type = comp.GetType();
                    _componentsPart[type][eId] = comp;
                    _componentsFlat[(eId, type)] = comp;
                    _allComponents.Add(type, comp);
                }
            }
        }

        // These two benchmarks find "needles in haystack" components.
        // We try to look up a component that 0.1% of entities have on 1% of entities.
        // Examples of this in the engine are VisibilityComponent lookups during PVS.
        [Benchmark]
        public void FindPart()
        {
            foreach (var entityUid in _lookupEntities)
            {
                var d = _componentsPart[typeof(BComponentLookup)];
                d.TryGetValue(entityUid, out _);
            }
        }

        [Benchmark]
        public void FindFlat()
        {
            foreach (var entityUid in _lookupEntities)
            {
                _componentsFlat.TryGetValue((entityUid, typeof(BComponentLookup)), out _);
            }
        }

        // Iteration benchmarks:
        // We try to iterate every instance of a single component (BComponent1) and see which is faster.
        [Benchmark]
        public void IterPart()
        {
            var list = _componentsPart[typeof(BComponent1)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list.Values)
            {
                arr[i++] = c;
            }
        }

        [Benchmark]
        public void IterFlat()
        {
            var list = _allComponents[typeof(BComponent1)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list)
            {
                arr[i++] = c;
            }
        }

        // We do the same as the iteration benchmarks but re-fetch the component every iteration.
        // This is what entity systems mostly do via entity queries because crappy code.
        [Benchmark]
        public void IterFetchPart()
        {
            var list = _componentsPart[typeof(BComponent1)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list.Values)
            {
                var eId = c.Uid;
                var d = _componentsPart[typeof(BComponent1)];
                arr[i++] = d[eId];
            }
        }

        [Benchmark]
        public void IterFetchFlat()
        {
            var list = _allComponents[typeof(BComponent1)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list)
            {
                var eId = c.Uid;
                arr[i++] = _componentsFlat[(eId, typeof(BComponent1))];
            }
        }

        // Same as the previous benchmarks but with BComponentLookup instead.
        // Which is only on 0.1% of entities.
        [Benchmark]
        public void IterFetchPartRare()
        {
            var list = _componentsPart[typeof(BComponentLookup)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list.Values)
            {
                var eId = c.Uid;
                var d = _componentsPart[typeof(BComponentLookup)];
                arr[i++] = d[eId];
            }
        }

        [Benchmark]
        public void IterFetchFlatRare()
        {
            var list = _allComponents[typeof(BComponentLookup)];
            var arr = new BComponent[list.Count];
            var i = 0;
            foreach (var c in list)
            {
                var eId = c.Uid;
                arr[i++] = _componentsFlat[(eId, typeof(BComponentLookup))];
            }
        }

        /// <summary>
        /// Minimal stand-in for the engine's entity id: a wrapped uint with value equality.
        /// </summary>
        private readonly struct EntityUid : IEquatable<EntityUid>
        {
            public readonly uint Value;

            public EntityUid(uint value)
            {
                Value = value;
            }

            public bool Equals(EntityUid other)
            {
                return Value == other.Value;
            }

            public override bool Equals(object obj)
            {
                return obj is EntityUid other && Equals(other);
            }

            public override int GetHashCode()
            {
                return (int) Value;
            }

            public static bool operator ==(EntityUid left, EntityUid right)
            {
                return left.Equals(right);
            }

            public static bool operator !=(EntityUid left, EntityUid right)
            {
                return !left.Equals(right);
            }
        }

        // Dummy component hierarchy; BComponentLookup plays the "rare component" role
        // (it takes the slot a BComponent5 would otherwise occupy).
        private abstract class BComponent
        {
            public EntityUid Uid;
        }

        private sealed class BComponent1 : BComponent
        {
        }

        private sealed class BComponent2 : BComponent
        {
        }

        private sealed class BComponent3 : BComponent
        {
        }

        private sealed class BComponent4 : BComponent
        {
        }

        private sealed class BComponentLookup : BComponent
        {
        }

        private sealed class BComponent6 : BComponent
        {
        }

        private sealed class BComponent7 : BComponent
        {
        }

        private sealed class BComponent8 : BComponent
        {
        }

        private sealed class BComponent9 : BComponent
        {
        }
    }
}
|
||||
273
Content.Benchmarks/ComponentQueryBenchmark.cs
Normal file
273
Content.Benchmarks/ComponentQueryBenchmark.cs
Normal file
@ -0,0 +1,273 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using BenchmarkDotNet.Configs;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.Shared.Clothing.Components;
|
||||
using Content.Shared.Doors.Components;
|
||||
using Content.Shared.Item;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.EntitySerialization;
|
||||
using Robust.Shared.EntitySerialization.Systems;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map.Components;
|
||||
using Robust.Shared.Random;
|
||||
using Robust.Shared.Utility;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
/// <summary>
/// Benchmarks for comparing the speed of various component fetching/lookup related methods, including directed event
/// subscriptions
/// </summary>
[Virtual]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[CategoriesColumn]
public class ComponentQueryBenchmark
{
    public const string Map = "Maps/saltern.yml";

    private TestPair _pair = default!;
    private IEntityManager _entMan = default!;
    private EntityQuery<ItemComponent> _itemQuery;
    private EntityQuery<ClothingComponent> _clothingQuery;
    private EntityQuery<MapComponent> _mapQuery;

    // Every entity on the loaded map that has an ItemComponent; the TryComp-style
    // benchmarks walk this array.
    private EntityUid[] _items = default!;

    [GlobalSetup]
    public void Setup()
    {
        ProgramShared.PathOffset = "../../../../";
        PoolManager.Startup(typeof(QueryBenchSystem).Assembly);

        _pair = PoolManager.GetServerClient().GetAwaiter().GetResult();
        _entMan = _pair.Server.ResolveDependency<IEntityManager>();

        _itemQuery = _entMan.GetEntityQuery<ItemComponent>();
        _clothingQuery = _entMan.GetEntityQuery<ClothingComponent>();
        _mapQuery = _entMan.GetEntityQuery<MapComponent>();

        // Deterministic RNG so repeated runs load identical map state.
        _pair.Server.ResolveDependency<IRobustRandom>().SetSeed(42);
        _pair.Server.WaitPost(() =>
        {
            var mapPath = new ResPath(Map);
            var options = DeserializationOptions.Default with {InitializeMaps = true};
            if (!_entMan.System<MapLoaderSystem>().TryLoadMap(mapPath, out _, out _, options))
                throw new Exception("Map load failed");
        }).GetAwaiter().GetResult();

        // Snapshot all item entities up front so the benchmarks measure lookups only.
        _items = new EntityUid[_entMan.Count<ItemComponent>()];
        var index = 0;
        var itemQuery = _entMan.AllEntityQueryEnumerator<ItemComponent>();
        while (itemQuery.MoveNext(out var uid, out _))
        {
            _items[index++] = uid;
        }
    }

    [GlobalCleanup]
    public async Task Cleanup()
    {
        await _pair.DisposeAsync();
        PoolManager.Shutdown();
    }

    #region TryComp

    /// <summary>
    /// Baseline TryComp benchmark. When the benchmark was created, around 40% of the items were clothing.
    /// </summary>
    [Benchmark(Baseline = true)]
    [BenchmarkCategory("TryComp")]
    public int TryComp()
    {
        var hash = 0;
        foreach (var uid in _items)
        {
            if (_clothingQuery.TryGetComponent(uid, out var clothing))
                hash = HashCode.Combine(hash, clothing.GetHashCode());
        }

        return hash;
    }

    /// <summary>
    /// Variant of <see cref="TryComp"/> that is meant to always fail to get a component.
    /// </summary>
    [Benchmark]
    [BenchmarkCategory("TryComp")]
    public int TryCompFail()
    {
        var hash = 0;
        foreach (var uid in _items)
        {
            // Items are never maps, so this lookup always misses.
            if (_mapQuery.TryGetComponent(uid, out var map))
                hash = HashCode.Combine(hash, map.GetHashCode());
        }

        return hash;
    }

    /// <summary>
    /// Variant of <see cref="TryComp"/> that is meant to always succeed getting a component.
    /// </summary>
    [Benchmark]
    [BenchmarkCategory("TryComp")]
    public int TryCompSucceed()
    {
        var hash = 0;
        foreach (var uid in _items)
        {
            // Every uid in _items has an ItemComponent by construction.
            if (_itemQuery.TryGetComponent(uid, out var item))
                hash = HashCode.Combine(hash, item.GetHashCode());
        }

        return hash;
    }

    /// <summary>
    /// Variant of <see cref="TryComp"/> that uses `Resolve()` to try get the component.
    /// </summary>
    [Benchmark]
    [BenchmarkCategory("TryComp")]
    public int Resolve()
    {
        var hash = 0;
        foreach (var uid in _items)
        {
            DoResolve(uid, ref hash);
        }

        return hash;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public void DoResolve(EntityUid uid, ref int hash, ClothingComponent? clothing = null)
    {
        if (_clothingQuery.Resolve(uid, ref clothing, false))
            hash = HashCode.Combine(hash, clothing.GetHashCode());
    }

    #endregion

    #region Enumeration

    [Benchmark]
    [BenchmarkCategory("Item Enumerator")]
    public int SingleItemEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<ItemComponent>();
        while (query.MoveNext(out var item))
        {
            hash = HashCode.Combine(hash, item.GetHashCode());
        }

        return hash;
    }

    [Benchmark]
    [BenchmarkCategory("Item Enumerator")]
    public int DoubleItemEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<ClothingComponent, ItemComponent>();
        while (query.MoveNext(out _, out var item))
        {
            hash = HashCode.Combine(hash, item.GetHashCode());
        }

        return hash;
    }

    [Benchmark]
    [BenchmarkCategory("Item Enumerator")]
    public int TripleItemEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<ClothingComponent, ItemComponent, TransformComponent>();
        while (query.MoveNext(out _, out _, out var xform))
        {
            hash = HashCode.Combine(hash, xform.GetHashCode());
        }

        return hash;
    }

    [Benchmark]
    [BenchmarkCategory("Airlock Enumerator")]
    public int SingleAirlockEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<AirlockComponent>();
        while (query.MoveNext(out var airlock))
        {
            hash = HashCode.Combine(hash, airlock.GetHashCode());
        }

        return hash;
    }

    [Benchmark]
    [BenchmarkCategory("Airlock Enumerator")]
    public int DoubleAirlockEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<AirlockComponent, DoorComponent>();
        while (query.MoveNext(out _, out var door))
        {
            hash = HashCode.Combine(hash, door.GetHashCode());
        }

        return hash;
    }

    [Benchmark]
    [BenchmarkCategory("Airlock Enumerator")]
    public int TripleAirlockEnumerator()
    {
        var hash = 0;
        var query = _entMan.AllEntityQueryEnumerator<AirlockComponent, DoorComponent, TransformComponent>();
        while (query.MoveNext(out _, out _, out var xform))
        {
            hash = HashCode.Combine(hash, xform.GetHashCode());
        }

        return hash;
    }

    #endregion

    // Measures directed by-ref event dispatch over the same item set; QueryBenchSystem
    // subscribes ClothingComponent to QueryBenchEvent and folds hashes into the event.
    [Benchmark(Baseline = true)]
    [BenchmarkCategory("Events")]
    public int StructEvents()
    {
        var ev = new QueryBenchEvent();
        foreach (var uid in _items)
        {
            _entMan.EventBus.RaiseLocalEvent(uid, ref ev);
        }

        return ev.HashCode;
    }
}
|
||||
|
||||
/// <summary>
/// By-ref event used by <see cref="ComponentQueryBenchmark.StructEvents"/>; handlers
/// accumulate component hash codes into <see cref="HashCode"/>.
/// </summary>
[ByRefEvent]
public struct QueryBenchEvent
{
    public int HashCode;
}

/// <summary>
/// Subscribes clothing entities to <see cref="QueryBenchEvent"/> so the event
/// benchmark has a directed handler to dispatch to.
/// </summary>
public sealed class QueryBenchSystem : EntitySystem
{
    public override void Initialize()
    {
        base.Initialize();
        SubscribeLocalEvent<ClothingComponent, QueryBenchEvent>(OnEvent);
    }

    // Fold this component's hash into the running event accumulator.
    private void OnEvent(EntityUid uid, ClothingComponent component, ref QueryBenchEvent args)
        => args.HashCode = HashCode.Combine(args.HashCode, component.GetHashCode());
}
|
||||
28
Content.Benchmarks/Content.Benchmarks.csproj
Normal file
28
Content.Benchmarks/Content.Benchmarks.csproj
Normal file
@ -0,0 +1,28 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<Import Project="..\RobustToolbox\MSBuild\Robust.Properties.targets" />
|
||||
<PropertyGroup>
|
||||
<!-- Work around https://github.com/dotnet/project-system/issues/4314 -->
|
||||
<TargetFramework>$(TargetFramework)</TargetFramework>
|
||||
<OutputPath>..\bin\Content.Benchmarks\</OutputPath>
|
||||
<IsPackable>false</IsPackable>
|
||||
<AppendTargetFrameworkToOutputPath>false</AppendTargetFrameworkToOutputPath>
|
||||
<OutputType>Exe</OutputType>
|
||||
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
|
||||
<LangVersion>12</LangVersion>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BenchmarkDotNet" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\Content.Client\Content.Client.csproj" />
|
||||
<ProjectReference Include="..\Content.Server\Content.Server.csproj" />
|
||||
<ProjectReference Include="..\Content.Shared\Content.Shared.csproj" />
|
||||
<ProjectReference Include="..\Content.Tests\Content.Tests.csproj" />
|
||||
<ProjectReference Include="..\Content.IntegrationTests\Content.IntegrationTests.csproj" />
|
||||
<ProjectReference Include="..\RobustToolbox\Robust.Benchmarks\Robust.Benchmarks.csproj" />
|
||||
<ProjectReference Include="..\RobustToolbox\Robust.Client\Robust.Client.csproj" />
|
||||
<ProjectReference Include="..\RobustToolbox\Robust.Server\Robust.Server.csproj" />
|
||||
<ProjectReference Include="..\RobustToolbox\Robust.Shared.Maths\Robust.Shared.Maths.csproj" />
|
||||
<ProjectReference Include="..\RobustToolbox\Robust.Shared\Robust.Shared.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
174
Content.Benchmarks/DeltaPressureBenchmark.cs
Normal file
174
Content.Benchmarks/DeltaPressureBenchmark.cs
Normal file
@ -0,0 +1,174 @@
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using BenchmarkDotNet.Diagnosers;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.Server.Atmos.Components;
|
||||
using Content.Server.Atmos.EntitySystems;
|
||||
using Content.Shared.Atmos.Components;
|
||||
using Content.Shared.CCVar;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Configuration;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map;
|
||||
using Robust.Shared.Map.Components;
|
||||
using Robust.Shared.Maths;
|
||||
using Robust.Shared.Prototypes;
|
||||
using Robust.Shared.Random;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
/// <summary>
|
||||
/// Spawns N number of entities with a <see cref="DeltaPressureComponent"/> and
|
||||
/// simulates them for a number of ticks M.
|
||||
/// </summary>
|
||||
[Virtual]
|
||||
[GcServer(true)]
|
||||
//[MemoryDiagnoser]
|
||||
//[ThreadingDiagnoser]
|
||||
public class DeltaPressureBenchmark
|
||||
{
|
||||
/// <summary>
|
||||
/// Number of entities (windows, really) to spawn with a <see cref="DeltaPressureComponent"/>.
|
||||
/// </summary>
|
||||
[Params(1, 10, 100, 1000, 5000, 10000, 50000, 100000)]
|
||||
public int EntityCount;
|
||||
|
||||
/// <summary>
|
||||
/// Number of entities that each parallel processing job will handle.
|
||||
/// </summary>
|
||||
// [Params(1, 10, 100, 1000, 5000, 10000)] For testing how multithreading parameters affect performance (THESE TESTS TAKE 16+ HOURS TO RUN)
|
||||
[Params(10)]
|
||||
public int BatchSize;
|
||||
|
||||
/// <summary>
|
||||
/// Number of entities to process per iteration in the DeltaPressure
|
||||
/// processing loop.
|
||||
/// </summary>
|
||||
// [Params(100, 1000, 5000, 10000, 50000)]
|
||||
[Params(1000)]
|
||||
public int EntitiesPerIteration;
|
||||
|
||||
private readonly EntProtoId _windowProtoId = "Window";
|
||||
private readonly EntProtoId _wallProtoId = "WallPlastitaniumIndestructible";
|
||||
|
||||
private TestPair _pair = default!;
|
||||
private IEntityManager _entMan = default!;
|
||||
private SharedMapSystem _map = default!;
|
||||
private IRobustRandom _random = default!;
|
||||
private IConfigurationManager _cvar = default!;
|
||||
private ITileDefinitionManager _tileDefMan = default!;
|
||||
private AtmosphereSystem _atmospereSystem = default!;
|
||||
|
||||
private Entity<GridAtmosphereComponent, GasTileOverlayComponent, MapGridComponent, TransformComponent>
|
||||
_testEnt;
|
||||
|
||||
[GlobalSetup]
|
||||
public async Task SetupAsync()
|
||||
{
|
||||
ProgramShared.PathOffset = "../../../../";
|
||||
PoolManager.Startup();
|
||||
_pair = await PoolManager.GetServerClient();
|
||||
var server = _pair.Server;
|
||||
|
||||
var mapdata = await _pair.CreateTestMap();
|
||||
|
||||
_entMan = server.ResolveDependency<IEntityManager>();
|
||||
_map = _entMan.System<SharedMapSystem>();
|
||||
_random = server.ResolveDependency<IRobustRandom>();
|
||||
_cvar = server.ResolveDependency<IConfigurationManager>();
|
||||
_tileDefMan = server.ResolveDependency<ITileDefinitionManager>();
|
||||
_atmospereSystem = _entMan.System<AtmosphereSystem>();
|
||||
|
||||
_random.SetSeed(69420); // Randomness needs to be deterministic for benchmarking.
|
||||
|
||||
_cvar.SetCVar(CCVars.DeltaPressureParallelToProcessPerIteration, EntitiesPerIteration);
|
||||
_cvar.SetCVar(CCVars.DeltaPressureParallelBatchSize, BatchSize);
|
||||
|
||||
var plating = _tileDefMan["Plating"].TileId;
|
||||
|
||||
/*
|
||||
Basically, we want to have a 5-wide grid of tiles.
|
||||
Edges are walled, and the length of the grid is determined by N + 2.
|
||||
Windows should only touch the top and bottom walls, and each other.
|
||||
*/
|
||||
|
||||
var length = EntityCount + 2; // ensures we can spawn exactly N windows between side walls
|
||||
const int height = 5;
|
||||
|
||||
await server.WaitPost(() =>
|
||||
{
|
||||
// Fill required tiles (extend grid) with plating
|
||||
for (var x = 0; x < length; x++)
|
||||
{
|
||||
for (var y = 0; y < height; y++)
|
||||
{
|
||||
_map.SetTile(mapdata.Grid, mapdata.Grid, new Vector2i(x, y), new Tile(plating));
|
||||
}
|
||||
}
|
||||
|
||||
// Spawn perimeter walls and windows row in the middle (y = 2)
|
||||
const int midY = height / 2;
|
||||
for (var x = 0; x < length; x++)
|
||||
{
|
||||
for (var y = 0; y < height; y++)
|
||||
{
|
||||
var coords = new EntityCoordinates(mapdata.Grid, x + 0.5f, y + 0.5f);
|
||||
|
||||
var isPerimeter = x == 0 || x == length - 1 || y == 0 || y == height - 1;
|
||||
if (isPerimeter)
|
||||
{
|
||||
_entMan.SpawnEntity(_wallProtoId, coords);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Spawn windows only on the middle row, spanning interior (excluding side walls)
|
||||
if (y == midY)
|
||||
{
|
||||
_entMan.SpawnEntity(_windowProtoId, coords);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Next we run the fixgridatmos command to ensure that we have some air on our grid.
|
||||
// Wait a little bit as well.
|
||||
// TODO: Unhardcode command magic string when fixgridatmos is an actual command we can ref and not just
|
||||
// a stamp-on in AtmosphereSystem.
|
||||
await _pair.WaitCommand("fixgridatmos " + mapdata.Grid.Owner, 1);
|
||||
|
||||
var uid = mapdata.Grid.Owner;
|
||||
_testEnt = new Entity<GridAtmosphereComponent, GasTileOverlayComponent, MapGridComponent, TransformComponent>(
|
||||
uid,
|
||||
_entMan.GetComponent<GridAtmosphereComponent>(uid),
|
||||
_entMan.GetComponent<GasTileOverlayComponent>(uid),
|
||||
_entMan.GetComponent<MapGridComponent>(uid),
|
||||
_entMan.GetComponent<TransformComponent>(uid));
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public async Task PerformFullProcess()
|
||||
{
|
||||
await _pair.Server.WaitPost(() =>
|
||||
{
|
||||
while (!_atmospereSystem.RunProcessingStage(_testEnt, AtmosphereProcessingState.DeltaPressure)) { }
|
||||
});
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public async Task PerformSingleRunProcess()
|
||||
{
|
||||
await _pair.Server.WaitPost(() =>
|
||||
{
|
||||
_atmospereSystem.RunProcessingStage(_testEnt, AtmosphereProcessingState.DeltaPressure);
|
||||
});
|
||||
}
|
||||
|
||||
[GlobalCleanup]
|
||||
public async Task CleanupAsync()
|
||||
{
|
||||
await _pair.DisposeAsync();
|
||||
PoolManager.Shutdown();
|
||||
}
|
||||
}
|
||||
70
Content.Benchmarks/DependencyInjectBenchmark.cs
Normal file
70
Content.Benchmarks/DependencyInjectBenchmark.cs
Normal file
@ -0,0 +1,70 @@
|
||||
/*
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.IoC;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
// To actually run this benchmark you'll have to make DependencyCollection public so it's accessible.
|
||||
|
||||
[Virtual]
|
||||
public class DependencyInjectBenchmark
|
||||
{
|
||||
[Params(InjectMode.Reflection, InjectMode.DynamicMethod)]
|
||||
public InjectMode Mode { get; set; }
|
||||
|
||||
private DependencyCollection _dependencyCollection;
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
_dependencyCollection = new DependencyCollection();
|
||||
_dependencyCollection.Register<X1, X1>();
|
||||
_dependencyCollection.Register<X2, X2>();
|
||||
_dependencyCollection.Register<X3, X3>();
|
||||
_dependencyCollection.Register<X4, X4>();
|
||||
_dependencyCollection.Register<X5, X5>();
|
||||
|
||||
_dependencyCollection.BuildGraph();
|
||||
|
||||
switch (Mode)
|
||||
{
|
||||
case InjectMode.Reflection:
|
||||
break;
|
||||
case InjectMode.DynamicMethod:
|
||||
// Running this without oneOff will cause DependencyCollection to cache the DynamicMethod injector.
|
||||
// So future injections (even with oneOff) will keep using the DynamicMethod.
|
||||
// AKA, be fast.
|
||||
_dependencyCollection.InjectDependencies(new TestDummy());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void Inject()
|
||||
{
|
||||
_dependencyCollection.InjectDependencies(new TestDummy(), true);
|
||||
}
|
||||
|
||||
public enum InjectMode
|
||||
{
|
||||
Reflection,
|
||||
DynamicMethod
|
||||
}
|
||||
|
||||
private sealed class X1 { }
|
||||
private sealed class X2 { }
|
||||
private sealed class X3 { }
|
||||
private sealed class X4 { }
|
||||
private sealed class X5 { }
|
||||
|
||||
private sealed class TestDummy
|
||||
{
|
||||
[Dependency] private readonly X1 _x1;
|
||||
[Dependency] private readonly X2 _x2;
|
||||
[Dependency] private readonly X3 _x3;
|
||||
[Dependency] private readonly X4 _x4;
|
||||
[Dependency] private readonly X5 _x5;
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
142
Content.Benchmarks/DeviceNetworkingBenchmark.cs
Normal file
142
Content.Benchmarks/DeviceNetworkingBenchmark.cs
Normal file
@ -0,0 +1,142 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.IntegrationTests.Tests.DeviceNetwork;
|
||||
using Content.Server.DeviceNetwork.Systems;
|
||||
using Content.Shared.DeviceNetwork;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
[Virtual]
|
||||
[MemoryDiagnoser]
|
||||
public class DeviceNetworkingBenchmark
|
||||
{
|
||||
private TestPair _pair = default!;
|
||||
private DeviceNetworkTestSystem _deviceNetTestSystem = default!;
|
||||
private DeviceNetworkSystem _deviceNetworkSystem = default!;
|
||||
private EntityUid _sourceEntity;
|
||||
private EntityUid _sourceWirelessEntity;
|
||||
private readonly List<EntityUid> _targetEntities = new();
|
||||
private readonly List<EntityUid> _targetWirelessEntities = new();
|
||||
|
||||
|
||||
private NetworkPayload _payload = default!;
|
||||
|
||||
[TestPrototypes]
|
||||
private const string Prototypes = @"
|
||||
- type: entity
|
||||
name: DummyNetworkDevicePrivate
|
||||
id: DummyNetworkDevicePrivate
|
||||
components:
|
||||
- type: DeviceNetwork
|
||||
transmitFrequency: 100
|
||||
receiveFrequency: 100
|
||||
deviceNetId: Private
|
||||
- type: entity
|
||||
name: DummyWirelessNetworkDevice
|
||||
id: DummyWirelessNetworkDevice
|
||||
components:
|
||||
- type: DeviceNetwork
|
||||
transmitFrequency: 100
|
||||
receiveFrequency: 100
|
||||
deviceNetId: Wireless
|
||||
- type: WirelessNetworkConnection
|
||||
range: 100
|
||||
";
|
||||
|
||||
//public static IEnumerable<int> EntityCountSource { get; set; }
|
||||
|
||||
//[ParamsSource(nameof(EntityCountSource))]
|
||||
public int EntityCount = 500;
|
||||
|
||||
[GlobalSetup]
|
||||
public async Task SetupAsync()
|
||||
{
|
||||
ProgramShared.PathOffset = "../../../../";
|
||||
PoolManager.Startup(typeof(DeviceNetworkingBenchmark).Assembly);
|
||||
_pair = await PoolManager.GetServerClient();
|
||||
var server = _pair.Server;
|
||||
|
||||
await server.WaitPost(() =>
|
||||
{
|
||||
var entityManager = server.InstanceDependencyCollection.Resolve<IEntityManager>();
|
||||
_deviceNetworkSystem = entityManager.EntitySysManager.GetEntitySystem<DeviceNetworkSystem>();
|
||||
_deviceNetTestSystem = entityManager.EntitySysManager.GetEntitySystem<DeviceNetworkTestSystem>();
|
||||
|
||||
var testValue = "test";
|
||||
_payload = new NetworkPayload
|
||||
{
|
||||
["Test"] = testValue,
|
||||
["testnumber"] = 1,
|
||||
["testbool"] = true
|
||||
};
|
||||
|
||||
_sourceEntity = entityManager.SpawnEntity("DummyNetworkDevicePrivate", MapCoordinates.Nullspace);
|
||||
_sourceWirelessEntity = entityManager.SpawnEntity("DummyWirelessNetworkDevice", MapCoordinates.Nullspace);
|
||||
|
||||
for (var i = 0; i < EntityCount; i++)
|
||||
{
|
||||
_targetEntities.Add(entityManager.SpawnEntity("DummyNetworkDevicePrivate", MapCoordinates.Nullspace));
|
||||
_targetWirelessEntities.Add(entityManager.SpawnEntity("DummyWirelessNetworkDevice", MapCoordinates.Nullspace));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
[GlobalCleanup]
|
||||
public async Task Cleanup()
|
||||
{
|
||||
await _pair.DisposeAsync();
|
||||
PoolManager.Shutdown();
|
||||
}
|
||||
|
||||
[Benchmark(Baseline = true, Description = "Entity Events")]
|
||||
public async Task EventSentBaseline()
|
||||
{
|
||||
var server = _pair.Server;
|
||||
|
||||
_pair.Server.Post(() =>
|
||||
{
|
||||
foreach (var entity in _targetEntities)
|
||||
{
|
||||
_deviceNetTestSystem.SendBaselineTestEvent(entity);
|
||||
}
|
||||
});
|
||||
|
||||
await server.WaitRunTicks(1);
|
||||
await server.WaitIdleAsync();
|
||||
}
|
||||
|
||||
[Benchmark(Description = "Device Net Broadcast No Connection Checks")]
|
||||
public async Task DeviceNetworkBroadcastNoConnectionChecks()
|
||||
{
|
||||
var server = _pair.Server;
|
||||
|
||||
_pair.Server.Post(() =>
|
||||
{
|
||||
_deviceNetworkSystem.QueuePacket(_sourceEntity, null, _payload, 100);
|
||||
});
|
||||
|
||||
await server.WaitRunTicks(1);
|
||||
await server.WaitIdleAsync();
|
||||
}
|
||||
|
||||
[Benchmark(Description = "Device Net Broadcast Wireless Connection Checks")]
|
||||
public async Task DeviceNetworkBroadcastWirelessConnectionChecks()
|
||||
{
|
||||
var server = _pair.Server;
|
||||
|
||||
_pair.Server.Post(() =>
|
||||
{
|
||||
_deviceNetworkSystem.QueuePacket(_sourceWirelessEntity, null, _payload, 100);
|
||||
});
|
||||
|
||||
await server.WaitRunTicks(1);
|
||||
await server.WaitIdleAsync();
|
||||
}
|
||||
}
|
||||
68
Content.Benchmarks/DynamicTreeBenchmark.cs
Normal file
68
Content.Benchmarks/DynamicTreeBenchmark.cs
Normal file
@ -0,0 +1,68 @@
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Maths;
|
||||
using Robust.Shared.Physics;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
[SimpleJob, MemoryDiagnoser]
|
||||
[Virtual]
|
||||
public class DynamicTreeBenchmark
|
||||
{
|
||||
private static readonly Box2[] Aabbs1 =
|
||||
{
|
||||
((Box2) default).Enlarged(1), //2x2 square
|
||||
((Box2) default).Enlarged(2), //4x4 square
|
||||
new(-3, 3, -3, 3), // point off to the bottom left
|
||||
new(-3, -3, -3, -3), // point off to the top left
|
||||
new(3, 3, 3, 3), // point off to the bottom right
|
||||
new(3, -3, 3, -3), // point off to the top right
|
||||
((Box2) default).Enlarged(1), //2x2 square
|
||||
((Box2) default).Enlarged(2), //4x4 square
|
||||
((Box2) default).Enlarged(1), //2x2 square
|
||||
((Box2) default).Enlarged(2), //4x4 square
|
||||
((Box2) default).Enlarged(1), //2x2 square
|
||||
((Box2) default).Enlarged(2), //4x4 square
|
||||
((Box2) default).Enlarged(1), //2x2 square
|
||||
((Box2) default).Enlarged(2), //4x4 square
|
||||
((Box2) default).Enlarged(3), //6x6 square
|
||||
new(-3, 3, -3, 3), // point off to the bottom left
|
||||
new(-3, -3, -3, -3), // point off to the top left
|
||||
new(3, 3, 3, 3), // point off to the bottom right
|
||||
new(3, -3, 3, -3), // point off to the top right
|
||||
};
|
||||
|
||||
private B2DynamicTree<int> _b2Tree;
|
||||
private DynamicTree<int> _tree;
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
_b2Tree = new B2DynamicTree<int>();
|
||||
_tree = new DynamicTree<int>((in int value) => Aabbs1[value], capacity: 16);
|
||||
|
||||
for (var i = 0; i < Aabbs1.Length; i++)
|
||||
{
|
||||
var aabb = Aabbs1[i];
|
||||
_b2Tree.CreateProxy(aabb, uint.MaxValue, i);
|
||||
_tree.Add(i);
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchB2()
|
||||
{
|
||||
object state = null;
|
||||
_b2Tree.Query(ref state, (ref object _, DynamicTree.Proxy __) => true, new Box2(-1, -1, 1, 1));
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchQ()
|
||||
{
|
||||
foreach (var _ in _tree.QueryAabb(new Box2(-1, -1, 1, 1), true))
|
||||
{
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
320
Content.Benchmarks/EntityFetchBenchmark.cs
Normal file
320
Content.Benchmarks/EntityFetchBenchmark.cs
Normal file
@ -0,0 +1,320 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Utility;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
[SimpleJob]
|
||||
[Virtual]
|
||||
public class EntityFetchBenchmark
|
||||
{
|
||||
[Params(1000)] public int N { get; set; }
|
||||
|
||||
public int M { get; set; } = 10;
|
||||
|
||||
private readonly DictEntityStorage _dictStorage = new();
|
||||
private readonly GenEntityStorage _genStorage = new();
|
||||
|
||||
private IEntityStorage<DictEntity, DictEntityUid> _dictStorageInterface;
|
||||
private IEntityStorage<GenEntity, GenEntityUid> _genStorageInterface;
|
||||
|
||||
private DictEntityUid[] _toReadDict;
|
||||
private DictEntity[] _toWriteDict;
|
||||
|
||||
private GenEntityUid[] _toReadGen;
|
||||
private GenEntity[] _toWriteGen;
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
_dictStorageInterface = _dictStorage;
|
||||
_genStorageInterface = _genStorage;
|
||||
|
||||
var r = new Random();
|
||||
|
||||
var allocatedGen = new List<GenEntity>();
|
||||
var allocatedDict = new List<DictEntity>();
|
||||
|
||||
for (var i = 0; i < N; i++)
|
||||
{
|
||||
allocatedGen.Add(_genStorage.NewEntity());
|
||||
allocatedDict.Add(_dictStorage.NewEntity());
|
||||
}
|
||||
|
||||
var delTo = N / 2;
|
||||
for (var i = 0; i < delTo; i++)
|
||||
{
|
||||
var index = r.Next(allocatedDict.Count);
|
||||
|
||||
var gEnt = allocatedGen[index];
|
||||
var dEnt = allocatedDict[index];
|
||||
|
||||
_genStorage.DeleteEntity(gEnt);
|
||||
_dictStorage.DeleteEntity(dEnt);
|
||||
|
||||
allocatedGen.RemoveSwap(i);
|
||||
allocatedDict.RemoveSwap(i);
|
||||
}
|
||||
|
||||
for (var i = 0; i < N; i++)
|
||||
{
|
||||
allocatedGen.Add(_genStorage.NewEntity());
|
||||
allocatedDict.Add(_dictStorage.NewEntity());
|
||||
}
|
||||
|
||||
for (var i = 0; i < delTo; i++)
|
||||
{
|
||||
var index = r.Next(allocatedDict.Count);
|
||||
|
||||
var gEnt = allocatedGen[index];
|
||||
var dEnt = allocatedDict[index];
|
||||
|
||||
_genStorage.DeleteEntity(gEnt);
|
||||
_dictStorage.DeleteEntity(dEnt);
|
||||
|
||||
allocatedGen.RemoveSwap(i);
|
||||
allocatedDict.RemoveSwap(i);
|
||||
}
|
||||
|
||||
_toReadDict = new DictEntityUid[M];
|
||||
_toWriteDict = new DictEntity[M];
|
||||
_toReadGen = new GenEntityUid[M];
|
||||
_toWriteGen = new GenEntity[M];
|
||||
|
||||
for (var i = 0; i < M; i++)
|
||||
{
|
||||
var index = r.Next(allocatedDict.Count);
|
||||
|
||||
_toReadDict[i] = allocatedDict[index].Uid;
|
||||
_toReadGen[i] = allocatedGen[index].Uid;
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchGenId()
|
||||
{
|
||||
for (var i = 0; i < M; i++)
|
||||
{
|
||||
var uid = _toReadGen[i];
|
||||
if (_genStorage.TryGetEntity(uid, out var entity))
|
||||
{
|
||||
_toWriteGen[i] = entity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchDict()
|
||||
{
|
||||
for (var i = 0; i < M; i++)
|
||||
{
|
||||
var uid = _toReadDict[i];
|
||||
if (_dictStorage.TryGetEntity(uid, out var entity))
|
||||
{
|
||||
_toWriteDict[i] = entity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchGenIdInterface()
|
||||
{
|
||||
for (var i = 0; i < M; i++)
|
||||
{
|
||||
var uid = _toReadGen[i];
|
||||
if (_genStorageInterface.TryGetEntity(uid, out var entity))
|
||||
{
|
||||
_toWriteGen[i] = entity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchDictInterface()
|
||||
{
|
||||
for (var i = 0; i < M; i++)
|
||||
{
|
||||
var uid = _toReadDict[i];
|
||||
if (_dictStorageInterface.TryGetEntity(uid, out var entity))
|
||||
{
|
||||
_toWriteDict[i] = entity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class DictEntityStorage : EntityStorage<DictEntity, DictEntityUid>
|
||||
{
|
||||
private int _nextValue;
|
||||
|
||||
private readonly Dictionary<DictEntityUid, DictEntity> _dict = new();
|
||||
|
||||
public override bool TryGetEntity(DictEntityUid entityUid, out DictEntity entity)
|
||||
{
|
||||
if (!_dict.TryGetValue(entityUid, out entity))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return !entity.Deleted;
|
||||
}
|
||||
|
||||
public DictEntity NewEntity()
|
||||
{
|
||||
var e = new DictEntity(new DictEntityUid(_nextValue++));
|
||||
_dict.Add(e.Uid, e);
|
||||
return e;
|
||||
}
|
||||
|
||||
public void DeleteEntity(DictEntity e)
|
||||
{
|
||||
DebugTools.Assert(!e.Deleted);
|
||||
|
||||
e.Deleted = true;
|
||||
|
||||
_dict.Remove(e.Uid);
|
||||
}
|
||||
}
|
||||
|
||||
private interface IEntityStorage<TEntity, TEntityUid>
|
||||
{
|
||||
public bool TryGetEntity(TEntityUid entityUid, out TEntity entity);
|
||||
}
|
||||
|
||||
private abstract class EntityStorage<TEntity, TEntityUid> : IEntityStorage<TEntity, TEntityUid>
|
||||
{
|
||||
public abstract bool TryGetEntity(TEntityUid entityUid, out TEntity entity);
|
||||
|
||||
public TEntity GetEntity(TEntityUid entityUid)
|
||||
{
|
||||
if (!TryGetEntity(entityUid, out var entity))
|
||||
throw new ArgumentException($"Failed to get entity {entityUid} from storage.");
|
||||
|
||||
return entity;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class GenEntityStorage : EntityStorage<GenEntity, GenEntityUid>
|
||||
{
|
||||
private (int generation, GenEntity entity)[] _entities = new (int, GenEntity)[1];
|
||||
private readonly List<int> _availableSlots = new() { 0 };
|
||||
|
||||
public override bool TryGetEntity(GenEntityUid entityUid, out GenEntity entity)
|
||||
{
|
||||
var (generation, genEntity) = _entities[entityUid.Index];
|
||||
entity = genEntity;
|
||||
|
||||
return generation == entityUid.Generation;
|
||||
}
|
||||
|
||||
public GenEntity NewEntity()
|
||||
{
|
||||
if (_availableSlots.Count == 0)
|
||||
{
|
||||
// Reallocate
|
||||
var oldEntities = _entities;
|
||||
_entities = new (int, GenEntity)[_entities.Length * 2];
|
||||
oldEntities.CopyTo(_entities, 0);
|
||||
|
||||
for (var i = oldEntities.Length; i < _entities.Length; i++)
|
||||
{
|
||||
_availableSlots.Add(i);
|
||||
}
|
||||
}
|
||||
|
||||
var index = _availableSlots.Pop();
|
||||
ref var slot = ref _entities[index];
|
||||
var slotEntity = new GenEntity(new GenEntityUid(slot.generation, index));
|
||||
slot.entity = slotEntity;
|
||||
|
||||
return slotEntity;
|
||||
}
|
||||
|
||||
public void DeleteEntity(GenEntity e)
|
||||
{
|
||||
DebugTools.Assert(!e.Deleted);
|
||||
|
||||
e.Deleted = true;
|
||||
|
||||
ref var slot = ref _entities[e.Uid.Index];
|
||||
slot.entity = null;
|
||||
slot.generation += 1;
|
||||
|
||||
_availableSlots.Add(e.Uid.Index);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private readonly struct DictEntityUid : IEquatable<DictEntityUid>
|
||||
{
|
||||
public readonly int Value;
|
||||
|
||||
public DictEntityUid(int value)
|
||||
{
|
||||
Value = value;
|
||||
}
|
||||
|
||||
public bool Equals(DictEntityUid other)
|
||||
{
|
||||
return Value == other.Value;
|
||||
}
|
||||
|
||||
public override bool Equals(object obj)
|
||||
{
|
||||
return obj is DictEntityUid other && Equals(other);
|
||||
}
|
||||
|
||||
public override int GetHashCode()
|
||||
{
|
||||
return Value;
|
||||
}
|
||||
|
||||
public static bool operator ==(DictEntityUid left, DictEntityUid right)
|
||||
{
|
||||
return left.Equals(right);
|
||||
}
|
||||
|
||||
public static bool operator !=(DictEntityUid left, DictEntityUid right)
|
||||
{
|
||||
return !left.Equals(right);
|
||||
}
|
||||
}
|
||||
|
||||
private readonly struct GenEntityUid
|
||||
{
|
||||
public readonly int Generation;
|
||||
public readonly int Index;
|
||||
|
||||
public GenEntityUid(int generation, int index)
|
||||
{
|
||||
Generation = generation;
|
||||
Index = index;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class DictEntity
|
||||
{
|
||||
public DictEntity(DictEntityUid uid)
|
||||
{
|
||||
Uid = uid;
|
||||
}
|
||||
|
||||
public DictEntityUid Uid { get; }
|
||||
|
||||
public bool Deleted { get; set; }
|
||||
}
|
||||
|
||||
private sealed class GenEntity
|
||||
{
|
||||
public GenEntityUid Uid { get; }
|
||||
|
||||
public bool Deleted { get; set; }
|
||||
|
||||
public GenEntity(GenEntityUid uid)
|
||||
{
|
||||
Uid = uid;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
96
Content.Benchmarks/EntityManagerGetAllComponents.cs
Normal file
96
Content.Benchmarks/EntityManagerGetAllComponents.cs
Normal file
@ -0,0 +1,96 @@
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Moq;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.Exceptions;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.IoC;
|
||||
using Robust.Shared.Log;
|
||||
using Robust.Shared.Map;
|
||||
using Robust.Shared.Reflection;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
[Virtual]
|
||||
public partial class EntityManagerGetAllComponents
|
||||
{
|
||||
private IEntityManager _entityManager;
|
||||
|
||||
[Params(5000)] public int N { get; set; }
|
||||
|
||||
public static void TestRun()
|
||||
{
|
||||
var x = new EntityManagerGetAllComponents
|
||||
{
|
||||
N = 500
|
||||
};
|
||||
x.Setup();
|
||||
x.Run();
|
||||
}
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
// Initialize component manager.
|
||||
IoCManager.InitThread();
|
||||
|
||||
IoCManager.Register<IEntityManager, EntityManager>();
|
||||
IoCManager.Register<IRuntimeLog, RuntimeLog>();
|
||||
IoCManager.Register<ILogManager, LogManager>();
|
||||
IoCManager.Register<IDynamicTypeFactory, DynamicTypeFactory>();
|
||||
IoCManager.Register<IEntitySystemManager, EntitySystemManager>();
|
||||
IoCManager.RegisterInstance<IReflectionManager>(new Mock<IReflectionManager>().Object);
|
||||
|
||||
var dummyReg = new ComponentRegistration(
|
||||
"Dummy",
|
||||
typeof(DummyComponent),
|
||||
CompIdx.Index<DummyComponent>());
|
||||
|
||||
var componentFactory = new Mock<IComponentFactory>();
|
||||
componentFactory.Setup(p => p.GetComponent<DummyComponent>()).Returns(new DummyComponent());
|
||||
componentFactory.Setup(m => m.GetIndex(typeof(DummyComponent))).Returns(CompIdx.Index<DummyComponent>());
|
||||
componentFactory.Setup(p => p.GetRegistration(It.IsAny<DummyComponent>())).Returns(dummyReg);
|
||||
componentFactory.Setup(p => p.GetAllRegistrations()).Returns(new[] { dummyReg });
|
||||
componentFactory.Setup(p => p.GetAllRefTypes()).Returns(new[] { CompIdx.Index<DummyComponent>() });
|
||||
|
||||
IoCManager.RegisterInstance<IComponentFactory>(componentFactory.Object);
|
||||
|
||||
IoCManager.BuildGraph();
|
||||
_entityManager = IoCManager.Resolve<IEntityManager>();
|
||||
_entityManager.Initialize();
|
||||
|
||||
// Initialize N entities with one component.
|
||||
for (var i = 0; i < N; i++)
|
||||
{
|
||||
var entity = _entityManager.SpawnEntity(null, EntityCoordinates.Invalid);
|
||||
_entityManager.AddComponent<DummyComponent>(entity);
|
||||
}
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public int Run()
|
||||
{
|
||||
var count = 0;
|
||||
|
||||
foreach (var _ in _entityManager.EntityQuery<DummyComponent>(true))
|
||||
{
|
||||
count += 1;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public int Noop()
|
||||
{
|
||||
var count = 0;
|
||||
|
||||
_entityManager.TryGetComponent(default, out DummyComponent _);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
private sealed partial class DummyComponent : Component
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
79
Content.Benchmarks/MapLoadBenchmark.cs
Normal file
79
Content.Benchmarks/MapLoadBenchmark.cs
Normal file
@ -0,0 +1,79 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.Server.Maps;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.EntitySerialization.Systems;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map;
|
||||
using Robust.Shared.Prototypes;
|
||||
using Robust.Shared.Utility;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
[Virtual]
|
||||
public class MapLoadBenchmark
|
||||
{
|
||||
private TestPair _pair = default!;
|
||||
private MapLoaderSystem _mapLoader = default!;
|
||||
private SharedMapSystem _mapSys = default!;
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
ProgramShared.PathOffset = "../../../../";
|
||||
PoolManager.Startup();
|
||||
|
||||
_pair = PoolManager.GetServerClient().GetAwaiter().GetResult();
|
||||
var server = _pair.Server;
|
||||
|
||||
Paths = server.ResolveDependency<IPrototypeManager>()
|
||||
.EnumeratePrototypes<GameMapPrototype>()
|
||||
.ToDictionary(x => x.ID, x => x.MapPath.ToString());
|
||||
|
||||
_mapLoader = server.ResolveDependency<IEntitySystemManager>().GetEntitySystem<MapLoaderSystem>();
|
||||
_mapSys = server.ResolveDependency<IEntitySystemManager>().GetEntitySystem<SharedMapSystem>();
|
||||
}
|
||||
|
||||
[GlobalCleanup]
|
||||
public async Task Cleanup()
|
||||
{
|
||||
await _pair.DisposeAsync();
|
||||
PoolManager.Shutdown();
|
||||
}
|
||||
|
||||
public static readonly string[] MapsSource = { "Empty", "Saltern", "Box", "Bagel", "Dev", "CentComm", "Core", "TestTeg", "Packed", "Omega", "Reach", "Meta", "Marathon", "MeteorArena", "Fland", "Oasis", "Convex"};
|
||||
|
||||
[ParamsSource(nameof(MapsSource))]
|
||||
public string Map;
|
||||
|
||||
public Dictionary<string, string> Paths;
|
||||
private MapId _mapId;
|
||||
|
||||
[Benchmark]
|
||||
public async Task LoadMap()
|
||||
{
|
||||
var mapPath = new ResPath(Paths[Map]);
|
||||
var server = _pair.Server;
|
||||
await server.WaitPost(() =>
|
||||
{
|
||||
var success = _mapLoader.TryLoadMap(mapPath, out var map, out _);
|
||||
if (!success)
|
||||
throw new Exception("Map load failed");
|
||||
_mapId = map.Value.Comp.MapId;
|
||||
});
|
||||
}
|
||||
|
||||
[IterationCleanup]
|
||||
public void IterationCleanup()
|
||||
{
|
||||
var server = _pair.Server;
|
||||
server.WaitPost(() => _mapSys.DeleteMap(_mapId))
|
||||
.Wait();
|
||||
}
|
||||
}
|
||||
265
Content.Benchmarks/NetSerializerIntBenchmark.cs
Normal file
265
Content.Benchmarks/NetSerializerIntBenchmark.cs
Normal file
@ -0,0 +1,265 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
[SimpleJob]
|
||||
[Virtual]
|
||||
public class NetSerializerIntBenchmark
|
||||
{
|
||||
private MemoryStream _writeStream;
|
||||
private MemoryStream _readStream;
|
||||
private readonly ushort _x16 = 5;
|
||||
private readonly uint _x32 = 5;
|
||||
private readonly ulong _x64 = 5;
|
||||
private ushort _read16;
|
||||
private uint _read32;
|
||||
private ulong _read64;
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
_writeStream = new MemoryStream(64);
|
||||
_readStream = new MemoryStream();
|
||||
_readStream.Write(new byte[] { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8 });
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite16Span()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt16Span(_writeStream, _x16);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite32Span()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt32Span(_writeStream, _x32);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite64Span()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt64Span(_writeStream, _x64);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchRead16Span()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read16 = ReadUInt16Span(_readStream);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchRead32Span()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read32 = ReadUInt32Span(_readStream);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchRead64Span()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read64 = ReadUInt64Span(_readStream);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite16Byte()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt16Byte(_writeStream, _x16);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite32Byte()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt32Byte(_writeStream, _x32);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchWrite64Byte()
|
||||
{
|
||||
_writeStream.Position = 0;
|
||||
WriteUInt64Byte(_writeStream, _x64);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchRead16Byte()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read16 = ReadUInt16Byte(_readStream);
|
||||
}
|
||||
[Benchmark]
|
||||
public void BenchRead32Byte()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read32 = ReadUInt32Byte(_readStream);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public void BenchRead64Byte()
|
||||
{
|
||||
_readStream.Position = 0;
|
||||
_read64 = ReadUInt64Byte(_readStream);
|
||||
}
|
||||
|
||||
private static void WriteUInt16Byte(Stream stream, ushort value)
|
||||
{
|
||||
stream.WriteByte((byte) value);
|
||||
stream.WriteByte((byte) (value >> 8));
|
||||
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="value"/> as four bytes, least significant first
/// (little-endian), using one WriteByte call per octet.
/// </summary>
private static void WriteUInt32Byte(Stream stream, uint value)
{
    for (var shift = 0; shift < 32; shift += 8)
    {
        stream.WriteByte(unchecked((byte) (value >> shift)));
    }
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="value"/> as eight bytes, least significant first
/// (little-endian), using one WriteByte call per octet.
/// </summary>
private static void WriteUInt64Byte(Stream stream, ulong value)
{
    for (var shift = 0; shift < 64; shift += 8)
    {
        stream.WriteByte(unchecked((byte) (value >> shift)));
    }
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 16-bit unsigned integer one byte at a time.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before two bytes were available.</exception>
private static ushort ReadUInt16Byte(Stream stream)
{
    // Accumulate in an int and cast once at the end; merge octets lowest-first.
    var result = 0;
    for (var octet = 0; octet < 2; octet++)
    {
        var b = stream.ReadByte();
        if (b < 0)
            throw new EndOfStreamException();

        result |= b << (octet * 8);
    }

    return (ushort) result;
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 32-bit unsigned integer one byte at a time.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before four bytes were available.</exception>
private static uint ReadUInt32Byte(Stream stream)
{
    var result = 0u;
    for (var octet = 0; octet < 4; octet++)
    {
        var b = stream.ReadByte();
        if (b < 0)
            throw new EndOfStreamException();

        result |= (uint) b << (octet * 8);
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 64-bit unsigned integer one byte at a time.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before eight bytes were available.</exception>
private static ulong ReadUInt64Byte(Stream stream)
{
    var result = 0ul;
    for (var octet = 0; octet < 8; octet++)
    {
        var b = stream.ReadByte();
        if (b < 0)
            throw new EndOfStreamException();

        result |= (ulong) b << (octet * 8);
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="value"/> little-endian via a stack scratch buffer
/// and a single Stream.Write call.
/// </summary>
private static void WriteUInt16Span(Stream stream, ushort value)
{
    Span<byte> scratch = stackalloc byte[sizeof(ushort)];
    BinaryPrimitives.WriteUInt16LittleEndian(scratch, value);
    stream.Write(scratch);
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="value"/> little-endian via a stack scratch buffer
/// and a single Stream.Write call.
/// </summary>
private static void WriteUInt32Span(Stream stream, uint value)
{
    Span<byte> scratch = stackalloc byte[sizeof(uint)];
    BinaryPrimitives.WriteUInt32LittleEndian(scratch, value);
    stream.Write(scratch);
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="value"/> little-endian via a stack scratch buffer
/// and a single Stream.Write call.
/// </summary>
private static void WriteUInt64Span(Stream stream, ulong value)
{
    Span<byte> scratch = stackalloc byte[sizeof(ulong)];
    BinaryPrimitives.WriteUInt64LittleEndian(scratch, value);
    stream.Write(scratch);
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 16-bit unsigned integer, looping on partial reads
/// until the two-byte buffer is full.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before the buffer could be filled.</exception>
private static ushort ReadUInt16Span(Stream stream)
{
    Span<byte> buffer = stackalloc byte[sizeof(ushort)];

    // Shrink the window as bytes arrive; Read may return fewer bytes than requested.
    var remaining = buffer;
    while (!remaining.IsEmpty)
    {
        var count = stream.Read(remaining);
        if (count == 0)
            throw new EndOfStreamException();

        remaining = remaining.Slice(count);
    }

    return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 32-bit unsigned integer, looping on partial reads
/// until the four-byte buffer is full.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before the buffer could be filled.</exception>
private static uint ReadUInt32Span(Stream stream)
{
    Span<byte> buffer = stackalloc byte[sizeof(uint)];

    var remaining = buffer;
    while (!remaining.IsEmpty)
    {
        var count = stream.Read(remaining);
        if (count == 0)
            throw new EndOfStreamException();

        remaining = remaining.Slice(count);
    }

    return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
|
||||
|
||||
/// <summary>
/// Reads a little-endian 64-bit unsigned integer, looping on partial reads
/// until the eight-byte buffer is full.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before the buffer could be filled.</exception>
private static ulong ReadUInt64Span(Stream stream)
{
    Span<byte> buffer = stackalloc byte[sizeof(ulong)];

    var remaining = buffer;
    while (!remaining.IsEmpty)
    {
        var count = stream.Read(remaining);
        if (count == 0)
            throw new EndOfStreamException();

        remaining = remaining.Slice(count);
    }

    return BinaryPrimitives.ReadUInt64LittleEndian(buffer);
}
|
||||
}
|
||||
}
|
||||
431
Content.Benchmarks/NetSerializerStringBenchmark.cs
Normal file
431
Content.Benchmarks/NetSerializerStringBenchmark.cs
Normal file
@ -0,0 +1,431 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Text.Unicode;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Lidgren.Network;
|
||||
using NetSerializer;
|
||||
using Robust.Shared.Analyzers;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
// Code for the *Slow and *Unsafe implementations taken from NetSerializer, licensed under the MIT license.
|
||||
|
||||
[MemoryDiagnoser]
|
||||
[Virtual]
|
||||
public class NetSerializerStringBenchmark
|
||||
{
|
||||
private const int StringByteBufferLength = 256;
|
||||
private const int StringCharBufferLength = 128;
|
||||
|
||||
private string _toSerialize;
|
||||
|
||||
[Params(8, 64, 256, 1024)]
|
||||
public int StringLength { get; set; }
|
||||
|
||||
private readonly MemoryStream _outputStream = new(2048);
|
||||
private readonly MemoryStream _inputStream = new(2048);
|
||||
|
||||
[GlobalSetup]
public void Setup()
{
    // Build a test string of exactly StringLength chars: StringLength/2 random
    // bytes rendered as hex (presumably two chars per byte — ToHexString).
    Span<byte> buf = stackalloc byte[StringLength / 2];
    // NOTE(review): unseeded Random — the input data differs between runs;
    // consider seeding if reproducible inputs matter for comparisons.
    new Random().NextBytes(buf);
    _toSerialize = NetUtility.ToHexString(buf);
    // Pre-serialize once with the reference implementation so the read
    // benchmarks have valid bytes to parse.
    Primitives.WritePrimitive(_inputStream, _toSerialize);
}
|
||||
|
||||
[Benchmark]
public void BenchWriteCore()
{
    // Rewind and serialize with the span/Utf8.FromUtf16 implementation.
    _outputStream.Position = 0;
    WritePrimitiveCore(_outputStream, _toSerialize);
}
|
||||
|
||||
[Benchmark]
public void BenchReadCore()
{
    // Rewind and deserialize with the span/Utf8.ToUtf16 implementation.
    _inputStream.Position = 0;
    ReadPrimitiveCore(_inputStream, out _);
}
|
||||
|
||||
[Benchmark]
public void BenchWriteUnsafe()
{
    // Rewind and serialize with NetSerializer's pointer-based implementation.
    _outputStream.Position = 0;
    WritePrimitiveUnsafe(_outputStream, _toSerialize);
}
|
||||
|
||||
[Benchmark]
public void BenchReadUnsafe()
{
    // Rewind and deserialize with NetSerializer's pointer-based implementation.
    _inputStream.Position = 0;
    ReadPrimitiveUnsafe(_inputStream, out _);
}
|
||||
|
||||
[Benchmark]
public void BenchWriteSlow()
{
    // Rewind and serialize with the allocating baseline implementation.
    _outputStream.Position = 0;
    WritePrimitiveSlow(_outputStream, _toSerialize);
}
|
||||
|
||||
[Benchmark]
public void BenchReadSlow()
{
    // Rewind and deserialize with the allocating baseline implementation.
    _inputStream.Position = 0;
    ReadPrimitiveSlow(_inputStream, out _);
}
|
||||
|
||||
/// <summary>
/// Serializes a string in NetSerializer's wire format using Utf8.FromUtf16 and a
/// stack buffer: a (UTF-8 byte count + 1) prefix, then the char count, then the
/// UTF-8 payload. A prefix of 0 encodes null and 1 encodes the empty string.
/// </summary>
public static void WritePrimitiveCore(Stream stream, string value)
{
    if (value == null)
    {
        Primitives.WritePrimitive(stream, (uint) 0);
        return;
    }

    if (value.Length == 0)
    {
        Primitives.WritePrimitive(stream, (uint) 1);
        return;
    }

    Span<byte> buf = stackalloc byte[StringByteBufferLength];

    var totalChars = value.Length;
    var totalBytes = Encoding.UTF8.GetByteCount(value);

    // The byte count is offset by 1 so 0/1 can encode null/empty.
    Primitives.WritePrimitive(stream, (uint) totalBytes + 1);
    Primitives.WritePrimitive(stream, (uint) totalChars);

    ReadOnlySpan<char> span = value;
    while (true)
    {
        // `span` always holds the entire remaining tail of the string, so the source
        // genuinely ends at the string's end on every call. The previous condition
        // (`totalRead + totalChars >= totalChars`) was tautologically true; pass
        // true explicitly and drop the dead accumulator.
        Utf8.FromUtf16(span, buf, out var read, out var wrote, isFinalBlock: true);
        stream.Write(buf[0..wrote]);
        if (read >= totalChars)
        {
            break;
        }

        // Advance past the chars converted this round and keep going.
        span = span[read..];
        totalChars -= read;
    }
}
|
||||
|
||||
/// <summary>
/// Deserializes a string written by <see cref="WritePrimitiveCore"/>: reads the
/// (byte count + 1) prefix and the char count, then decodes the UTF-8 payload
/// directly into the final string allocation via string.Create.
/// </summary>
public static void ReadPrimitiveCore(Stream stream, out string value)
{
    Primitives.ReadPrimitive(stream, out uint totalBytes);

    // 0 encodes null, 1 encodes the empty string.
    if (totalBytes == 0)
    {
        value = null;
        return;
    }

    if (totalBytes == 1)
    {
        value = string.Empty;
        return;
    }

    // Undo the +1 offset applied by the writer.
    totalBytes -= 1;

    Primitives.ReadPrimitive(stream, out uint totalChars);

    // Decode straight into the string's buffer; no intermediate char[] allocation.
    value = string.Create((int) totalChars, ((int) totalBytes, stream), StringSpanRead);
}
|
||||
|
||||
// string.Create callback: streams `totalBytes` of UTF-8 through a stack buffer and
// decodes them into the destination char span, carrying partial multi-byte
// sequences over between reads.
private static void StringSpanRead(Span<char> span, (int totalBytes, Stream stream) tuple)
{
    Span<byte> buf = stackalloc byte[StringByteBufferLength];

    // ReSharper disable VariableHidesOuterVariable
    var (totalBytes, stream) = tuple;
    // ReSharper restore VariableHidesOuterVariable

    var totalBytesRead = 0; // bytes consumed by the decoder so far
    var totalCharsRead = 0; // chars produced so far
    var writeBufStart = 0; // carried-over bytes already sitting at the front of buf

    while (totalBytesRead < totalBytes)
    {
        var bytesLeft = totalBytes - totalBytesRead;
        var bytesReadLeft = Math.Min(buf.Length, bytesLeft);
        // NOTE(review): the slice end subtracts writeBufStart a second time
        // ([writeBufStart..(bytesReadLeft - writeBufStart)]); it looks like it
        // should be buf[writeBufStart..bytesReadLeft] — confirm against the
        // carry-over path below before reusing this code.
        var writeSlice = buf[writeBufStart..(bytesReadLeft - writeBufStart)];
        var bytesInBuffer = stream.Read(writeSlice);
        if (bytesInBuffer == 0) throw new EndOfStreamException();

        // Bytes available for decoding = freshly read + carried over from last round.
        var readFromStream = bytesInBuffer + writeBufStart;
        var final = readFromStream == bytesLeft;
        var status = Utf8.ToUtf16(buf[..readFromStream], span[totalCharsRead..], out var bytesRead, out var charsRead, isFinalBlock: final);

        totalBytesRead += bytesRead;
        totalCharsRead += charsRead;
        writeBufStart = 0;

        if (status == OperationStatus.DestinationTooSmall)
        {
            // Malformed data?
            throw new InvalidDataException();
        }

        if (status == OperationStatus.NeedMoreData)
        {
            // We got cut short in the middle of a multi-byte UTF-8 sequence.
            // So we need to move it to the bottom of the span, then read the next bit *past* that.
            // This copy should be fine because we're only ever gonna be copying up to 4 bytes
            // from the end of the buffer to the start.
            // So no chance of overlap.
            buf[bytesRead..].CopyTo(buf);
            writeBufStart = bytesReadLeft - bytesRead;
            continue;
        }

        Debug.Assert(status == OperationStatus.Done);
    }
}
|
||||
|
||||
/// <summary>
/// Baseline implementation: encodes the whole string into a freshly allocated
/// byte[] and writes it with one call. Same wire format as the other variants.
/// </summary>
public static void WritePrimitiveSlow(Stream stream, string value)
{
    if (value == null)
    {
        // 0 encodes null.
        Primitives.WritePrimitive(stream, (uint) 0);
        return;
    }
    else if (value.Length == 0)
    {
        // 1 encodes the empty string.
        Primitives.WritePrimitive(stream, (uint) 1);
        return;
    }

    // throwOnInvalidBytes: true — invalid surrogates raise instead of being replaced.
    var encoding = new UTF8Encoding(false, true);

    var len = encoding.GetByteCount(value);

    // Byte count offset by 1 so 0/1 can encode null/empty, then the char count.
    Primitives.WritePrimitive(stream, (uint) len + 1);
    Primitives.WritePrimitive(stream, (uint) value.Length);

    var buf = new byte[len];

    encoding.GetBytes(value, 0, value.Length, buf, 0);

    stream.Write(buf, 0, len);
}
|
||||
|
||||
/// <summary>
/// Baseline implementation: reads the full UTF-8 payload into a byte[] and
/// decodes it in one GetString call.
/// </summary>
public static void ReadPrimitiveSlow(Stream stream, out string value)
{
    Primitives.ReadPrimitive(stream, out uint len);

    // 0 encodes null, 1 encodes the empty string.
    if (len == 0)
    {
        value = null;
        return;
    }
    else if (len == 1)
    {
        value = string.Empty;
        return;
    }

    // The char count is part of the format but unused by this variant.
    Primitives.ReadPrimitive(stream, out uint _);

    // Undo the +1 offset applied by the writer.
    len -= 1;

    var encoding = new UTF8Encoding(false, true);

    var buf = new byte[len];

    var l = 0;

    // Loop on partial reads until the whole payload is buffered.
    while (l < len)
    {
        var r = stream.Read(buf, l, (int) len - l);
        if (r == 0)
            throw new EndOfStreamException();
        l += r;
    }

    value = encoding.GetString(buf);
}
|
||||
|
||||
/// <summary>
/// Lazily initialized UTF-8 encode/decode state plus scratch buffers, intended
/// to be cached per thread (see the thread-static field below NetSerializer-style).
/// </summary>
private sealed class StringHelper
{
    public StringHelper() => Encoding = new UTF8Encoding(false, true);

    private Encoder _encoder;
    private Decoder _decoder;

    private byte[] _byteBuffer;
    private char[] _charBuffer;

    public UTF8Encoding Encoding { get; private set; }

    // Each accessor materializes its backing field on first use and reuses it afterwards.
    public Encoder Encoder => _encoder ??= Encoding.GetEncoder();

    public Decoder Decoder => _decoder ??= Encoding.GetDecoder();

    public byte[] ByteBuffer => _byteBuffer ??= new byte[StringByteBufferLength];

    public char[] CharBuffer => _charBuffer ??= new char[StringCharBufferLength];
}
|
||||
|
||||
[ThreadStatic]
|
||||
private static StringHelper _stringHelper;
|
||||
|
||||
/// <summary>
/// NetSerializer's pointer-based implementation: converts through a cached
/// per-thread Encoder and byte buffer, pinning the string and buffer with
/// <c>fixed</c> for each conversion chunk. Same wire format as the other variants.
/// </summary>
public static unsafe void WritePrimitiveUnsafe(Stream stream, string value)
{
    if (value == null)
    {
        // 0 encodes null.
        Primitives.WritePrimitive(stream, (uint) 0);
        return;
    }
    else if (value.Length == 0)
    {
        // 1 encodes the empty string.
        Primitives.WritePrimitive(stream, (uint) 1);
        return;
    }

    // Fetch (or create) this thread's cached encoder + scratch buffer.
    var helper = _stringHelper;
    if (helper == null)
        _stringHelper = helper = new StringHelper();

    var encoder = helper.Encoder;
    var buf = helper.ByteBuffer;

    var totalChars = value.Length;
    int totalBytes;

    // flush: true so the byte count includes any pending encoder state.
    fixed (char* ptr = value)
        totalBytes = encoder.GetByteCount(ptr, totalChars, true);

    Primitives.WritePrimitive(stream, (uint) totalBytes + 1);
    Primitives.WritePrimitive(stream, (uint) totalChars);

    // p tracks how many chars have been converted so far.
    var p = 0;
    var completed = false;

    while (completed == false)
    {
        int charsConverted;
        int bytesConverted;

        fixed (char* src = value)
        fixed (byte* dst = buf)
        {
            // Convert as much as fits in the scratch buffer this round.
            encoder.Convert(src + p, totalChars - p, dst, buf.Length, true,
                out charsConverted, out bytesConverted, out completed);
        }

        stream.Write(buf, 0, bytesConverted);

        p += charsConverted;
    }
}
|
||||
|
||||
/// <summary>
/// NetSerializer's implementation of string reading: decodes through a cached
/// per-thread Decoder, reusing a cached char[] when the string is small enough,
/// then materializes the string with one copy at the end.
/// </summary>
public static void ReadPrimitiveUnsafe(Stream stream, out string value)
{
    Primitives.ReadPrimitive(stream, out uint totalBytes);

    // 0 encodes null, 1 encodes the empty string.
    if (totalBytes == 0)
    {
        value = null;
        return;
    }
    else if (totalBytes == 1)
    {
        value = string.Empty;
        return;
    }

    // Undo the +1 offset applied by the writer.
    totalBytes -= 1;

    Primitives.ReadPrimitive(stream, out uint totalChars);

    // Fetch (or create) this thread's cached decoder + scratch buffers.
    var helper = _stringHelper;
    if (helper == null)
        _stringHelper = helper = new StringHelper();

    var decoder = helper.Decoder;
    var buf = helper.ByteBuffer;
    char[] chars;
    // Reuse the cached char buffer for small strings; allocate otherwise.
    if (totalChars <= StringCharBufferLength)
        chars = helper.CharBuffer;
    else
        chars = new char[totalChars];

    var streamBytesLeft = (int) totalBytes;

    // cp: chars produced so far across all chunks.
    var cp = 0;

    while (streamBytesLeft > 0)
    {
        var bytesInBuffer = stream.Read(buf, 0, Math.Min(buf.Length, streamBytesLeft));
        if (bytesInBuffer == 0)
            throw new EndOfStreamException();

        streamBytesLeft -= bytesInBuffer;
        // Flush pending decoder state only once the final payload chunk arrives.
        var flush = streamBytesLeft == 0;

        var completed = false;

        // p: bytes of this chunk consumed so far.
        var p = 0;

        while (completed == false)
        {
            decoder.Convert(
                buf,
                p,
                bytesInBuffer - p,
                chars,
                cp,
                (int) totalChars - cp,
                flush,
                out var bytesConverted,
                out var charsConverted,
                out completed
            );

            p += bytesConverted;
            cp += charsConverted;
        }
    }

    value = new string(chars, 0, (int) totalChars);
}
|
||||
}
|
||||
}
|
||||
28
Content.Benchmarks/Program.cs
Normal file
28
Content.Benchmarks/Program.cs
Normal file
@ -0,0 +1,28 @@
|
||||
using System;
|
||||
using BenchmarkDotNet.Running;
|
||||
using BenchmarkDotNet.Configs;
|
||||
using Robust.Benchmarks.Configs;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
internal static class Program
{

    public static void Main(string[] args)
    {
#if DEBUG
        // Debug builds give meaningless numbers; warn loudly and run in-process
        // so a crashing benchmark can at least be debugged.
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("\nWARNING: YOU ARE RUNNING A DEBUG BUILD, USE A RELEASE BUILD FOR AN ACCURATE BENCHMARK");
        Console.WriteLine("THE DEBUG BUILD IS ONLY GOOD FOR FIXING A CRASHING BENCHMARK\n");
        var baseConfig = new DebugInProcessConfig();
#else
        // Opt into the SQL-exporting config via environment variable; default config otherwise.
        var baseConfig = Environment.GetEnvironmentVariable("ROBUST_BENCHMARKS_ENABLE_SQL") != null
            ? DefaultSQLConfig.Instance
            : DefaultConfig.Instance;
#endif
        var config = ManualConfig.Create(baseConfig);
        // Benchmark project builds can be slow; give MSBuild extra headroom.
        config.BuildTimeout = TimeSpan.FromMinutes(5);
        BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);
    }
}
|
||||
}
|
||||
178
Content.Benchmarks/PvsBenchmark.cs
Normal file
178
Content.Benchmarks/PvsBenchmark.cs
Normal file
@ -0,0 +1,178 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.Server.Mind;
|
||||
using Content.Server.Warps;
|
||||
using Content.Shared.Warps;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.EntitySerialization;
|
||||
using Robust.Shared.EntitySerialization.Systems;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map;
|
||||
using Robust.Shared.Player;
|
||||
using Robust.Shared.Random;
|
||||
using Robust.Shared.Utility;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
// This benchmark probably benefits from some accidental cache locality. I,e. the order in which entities in a pvs
|
||||
// chunk are sent to players matches the order in which the entities were spawned.
|
||||
//
|
||||
// in a real mid-late game round, this is probably no longer the case.
|
||||
// One way to somewhat offset this is to update the NetEntity assignment to assign random (but still unique) NetEntity uids to entities.
|
||||
// This makes the benchmark run noticeably slower.
|
||||
|
||||
[Virtual]
|
||||
public class PvsBenchmark
|
||||
{
|
||||
public const string Map = "Maps/box.yml";
|
||||
|
||||
[Params(1, 8, 80)]
|
||||
public int PlayerCount { get; set; }
|
||||
|
||||
private TestPair _pair = default!;
|
||||
private IEntityManager _entMan = default!;
|
||||
private ICommonSession[] _players = default!;
|
||||
private EntityCoordinates[] _spawns = default!;
|
||||
public int _cycleOffset = 0;
|
||||
private SharedTransformSystem _sys = default!;
|
||||
private EntityCoordinates[] _locations = default!;
|
||||
|
||||
[GlobalSetup]
public void Setup()
{
#if !DEBUG
    // Benchmarks run from the build output directory; walk back up so game
    // resources resolve (presumably the repo root — matches other benchmarks).
    ProgramShared.PathOffset = "../../../../";
#endif
    PoolManager.Startup();

    _pair = PoolManager.GetServerClient().GetAwaiter().GetResult();
    _entMan = _pair.Server.ResolveDependency<IEntityManager>();
    // Force PVS on, single-threaded and synchronous so the benchmark measures
    // the PVS work itself rather than thread scheduling.
    _pair.Server.CfgMan.SetCVar(CVars.NetPVS, true);
    _pair.Server.CfgMan.SetCVar(CVars.ThreadParallelCount, 0);
    _pair.Server.CfgMan.SetCVar(CVars.NetPvsAsync, false);
    _sys = _entMan.System<SharedTransformSystem>();

    // GlobalSetup must be synchronous; block on the async setup.
    SetupAsync().Wait();
}
|
||||
|
||||
/// <summary>
/// Loads the benchmark map, spawns and equips one mob per dummy player, then walks
/// the players around the warp points so their PVS state accumulates plenty of
/// out-of-view entities before measurement starts.
/// </summary>
private async Task SetupAsync()
{
    // Spawn the map
    _pair.Server.ResolveDependency<IRobustRandom>().SetSeed(42);
    await _pair.Server.WaitPost(() =>
    {
        var path = new ResPath(Map);
        var opts = DeserializationOptions.Default with {InitializeMaps = true};
        if (!_entMan.System<MapLoaderSystem>().TryLoadMap(path, out _, out _, opts))
            throw new Exception("Map load failed");
    });

    // Get list of ghost warp positions
    _spawns = _entMan.AllComponentsList<WarpPointComponent>()
        .OrderBy(x => x.Component.Location)
        .Select(x => _entMan.GetComponent<TransformComponent>(x.Uid).Coordinates)
        .ToArray();

    // Spawn "Players".
    // (The previous Array.Resize(ref _players, PlayerCount) here was dead code:
    // _players is unconditionally replaced by the AddDummySessions result.)
    _players = await _pair.Server.AddDummySessions(PlayerCount);
    await _pair.Server.WaitPost(() =>
    {
        var mind = _pair.Server.System<MindSystem>();
        for (var i = 0; i < PlayerCount; i++)
        {
            // Distribute players across warp points round-robin.
            var pos = _spawns[i % _spawns.Length];
            var uid = _entMan.SpawnEntity("MobHuman", pos);
            _pair.Server.ConsoleHost.ExecuteCommand($"setoutfit {_entMan.GetNetEntity(uid)} CaptainGear");
            mind.ControlMob(_players[i].UserId, uid);
        }
    });

    // Repeatedly move players around so that they "explore" the map and see lots of entities.
    // This will populate their PVS data with out-of-view entities.
    var rng = new Random(42);
    ShufflePlayers(rng, 100);

    _pair.Server.PvsTick(_players);
    _pair.Server.PvsTick(_players);

    // Cache the final player positions for CycleTick to rotate through.
    var ents = _players.Select(x => x.AttachedEntity!.Value).ToArray();
    _locations = ents.Select(x => _entMan.GetComponent<TransformComponent>(x).Coordinates).ToArray();
}
|
||||
|
||||
/// <summary>Runs <paramref name="count"/> shuffle passes over the player positions.</summary>
private void ShufflePlayers(Random rng, int count)
{
    for (var pass = 0; pass < count; pass++)
    {
        ShufflePlayers(rng);
    }
}
|
||||
|
||||
// Randomly permutes the players' positions among themselves (Fisher–Yates swap),
// with a PVS tick before and after so the moves are observed.
private void ShufflePlayers(Random rng)
{
    _pair.Server.PvsTick(_players);

    var ents = _players.Select(x => x.AttachedEntity!.Value).ToArray();
    var locations = ents.Select(x => _entMan.GetComponent<TransformComponent>(x).Coordinates).ToArray();

    // Shuffle locations
    var n = locations.Length;
    while (n > 1)
    {
        n -= 1;
        var k = rng.Next(n + 1);
        (locations[k], locations[n]) = (locations[n], locations[k]);
    }

    // Apply the permuted positions on the server thread.
    _pair.Server.WaitPost(() =>
    {
        for (var i = 0; i < PlayerCount; i++)
        {
            _sys.SetCoordinates(ents[i], locations[i]);
        }
    }).Wait();

    _pair.Server.PvsTick(_players);
}
|
||||
|
||||
/// <summary>
/// Basic benchmark for PVS in a static situation where nothing moves or gets dirtied..
/// This effectively provides a lower bound on "real" pvs tick time, as it is missing:
/// - PVS chunks getting dirtied and needing to be rebuilt
/// - Fetching component states for dirty components
/// - Compressing &amp; sending network messages
/// - Sending PVS leave messages
/// </summary>
[Benchmark]
public void StaticTick() => _pair.Server.PvsTick(_players);
|
||||
|
||||
/// <summary>
/// Basic benchmark for PVS in a situation where players are teleporting all over the place. This isn't very
/// realistic, but unlike <see cref="StaticTick"/> this will actually also measure the speed of processing dirty
/// chunks and sending PVS leave messages.
/// </summary>
[Benchmark]
public void CycleTick()
{
    // Rotate every player onto the next cached position so each tick dirties chunks.
    _cycleOffset = (_cycleOffset + 1) % _players.Length;
    _pair.Server.WaitPost(() =>
    {
        for (var i = 0; i < PlayerCount; i++)
        {
            _sys.SetCoordinates(_players[i].AttachedEntity!.Value, _locations[(i + _cycleOffset) % _players.Length]);
        }
    }).Wait();
    _pair.Server.PvsTick(_players);
}
|
||||
}
|
||||
126
Content.Benchmarks/RaiseEventBenchmark.cs
Normal file
126
Content.Benchmarks/RaiseEventBenchmark.cs
Normal file
@ -0,0 +1,126 @@
|
||||
#nullable enable
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.GameObjects;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
[Virtual]
|
||||
public class RaiseEventBenchmark
|
||||
{
|
||||
private TestPair _pair = default!;
|
||||
private BenchSystem _sys = default!;
|
||||
|
||||
[GlobalSetup]
public void Setup()
{
    // Benchmarks run from the build output directory; walk back up so game
    // resources resolve (presumably the repo root — matches other benchmarks).
    ProgramShared.PathOffset = "../../../../";
    PoolManager.Startup(typeof(BenchSystem).Assembly);
    _pair = PoolManager.GetServerClient().GetAwaiter().GetResult();
    var entMan = _pair.Server.EntMan;
    _sys = entMan.System<BenchSystem>();

    _pair.Server.WaitPost(() =>
    {
        // Spawn one entity; its TransformComponent is the target for every benchmark.
        var uid = entMan.Spawn();
        _sys.Ent = new(uid, entMan.GetComponent<TransformComponent>(uid));
        // Same entity/component pair, but typed as IComponent for the interface-dispatch case.
        _sys.Ent2 = new(_sys.Ent.Owner, _sys.Ent.Comp);
    })
    .GetAwaiter()
    .GetResult();
}
|
||||
|
||||
[GlobalCleanup]
public async Task Cleanup()
{
    // Tear down the server/client pair and the pool so the process exits cleanly.
    await _pair.DisposeAsync();
    PoolManager.Shutdown();
}
|
||||
|
||||
/// <summary>Baseline: directed event raised through the event bus by EntityUid.</summary>
[Benchmark(Baseline = true)]
public int RaiseEvent() => _sys.RaiseEvent();
|
||||
|
||||
/// <summary>Component event raised with a concretely typed component reference.</summary>
[Benchmark]
public int RaiseCompEvent() => _sys.RaiseCompEvent();
|
||||
|
||||
/// <summary>Component event raised with an IComponent-typed reference.</summary>
[Benchmark]
public int RaiseICompEvent() => _sys.RaiseICompEvent();
|
||||
|
||||
/// <summary>Plain C# multicast event invocation, for comparison with the event bus.</summary>
[Benchmark]
public int RaiseCSharpEvent() => _sys.CSharpEvent();
|
||||
|
||||
/// <summary>
/// Test system providing one subscriber for each event-dispatch mechanism being measured.
/// </summary>
public sealed class BenchSystem : EntitySystem
{
    // Target entity + component for the events. Ent2 holds the same pair typed as
    // IComponent to exercise the interface-typed RaiseComponentEvent overload.
    public Entity<TransformComponent> Ent;
    public Entity<IComponent> Ent2;

    public delegate void EntityEventHandler(EntityUid uid, TransformComponent comp, ref BenchEv ev);

    public event EntityEventHandler? OnCSharpEvent;

    public override void Initialize()
    {
        base.Initialize();
        // The same handler serves both the event-bus subscription and the plain C# event.
        SubscribeLocalEvent<TransformComponent, BenchEv>(OnEvent);
        OnCSharpEvent += OnEvent;
    }

    // Directed event dispatched by EntityUid lookup.
    public int RaiseEvent()
    {
        var ev = new BenchEv();
        RaiseLocalEvent(Ent.Owner, ref ev);
        return ev.N;
    }

    // Component event with a concretely typed component reference.
    public int RaiseCompEvent()
    {
        var ev = new BenchEv();
        EntityManager.EventBus.RaiseComponentEvent(Ent.Owner, Ent.Comp, ref ev);
        return ev.N;
    }

    public int RaiseICompEvent()
    {
        // Raise with an IComponent instead of concrete type
        var ev = new BenchEv();
        EntityManager.EventBus.RaiseComponentEvent(Ent2.Owner, Ent2.Comp, ref ev);
        return ev.N;
    }

    // Plain C# multicast event for comparison against the event bus paths.
    public int CSharpEvent()
    {
        var ev = new BenchEv();
        OnCSharpEvent?.Invoke(Ent.Owner, Ent.Comp, ref ev);
        return ev.N;
    }

    // NoInlining keeps the handler call from being folded into the call sites,
    // so each mechanism pays a comparable invocation cost.
    [MethodImpl(MethodImplOptions.NoInlining)]
    private void OnEvent(EntityUid uid, TransformComponent component, ref BenchEv args)
    {
        args.N += uid.Id;
    }

    // Mutable by-ref payload so the benchmarks can observe that the handler ran.
    [ByRefEvent]
    public struct BenchEv
    {
        public int N;
    }
}
|
||||
}
|
||||
66
Content.Benchmarks/SpawnEquipDeleteBenchmark.cs
Normal file
66
Content.Benchmarks/SpawnEquipDeleteBenchmark.cs
Normal file
@ -0,0 +1,66 @@
|
||||
using System.Threading.Tasks;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Content.IntegrationTests;
|
||||
using Content.IntegrationTests.Pair;
|
||||
using Content.Server.Station.Systems;
|
||||
using Content.Shared.Roles;
|
||||
using Robust.Shared;
|
||||
using Robust.Shared.Analyzers;
|
||||
using Robust.Shared.GameObjects;
|
||||
using Robust.Shared.Map;
|
||||
|
||||
namespace Content.Benchmarks;
|
||||
|
||||
/// <summary>
/// This benchmarks spawns several humans, gives them captain equipment and then deletes them.
/// This measures performance for spawning, deletion, containers, and inventory code.
/// </summary>
[Virtual, MemoryDiagnoser]
public class SpawnEquipDeleteBenchmark
{
    private TestPair _pair = default!;
    private StationSpawningSystem _spawnSys = default!;
    // Prototype spawned for each iteration.
    private const string Mob = "MobHuman";
    private StartingGearPrototype _gear = default!;
    private EntityUid _entity;
    private EntityCoordinates _coords;

    // Number of spawn/equip/delete cycles per benchmark invocation.
    [Params(1, 4, 16, 64)]
    public int N;

    [GlobalSetup]
    public async Task SetupAsync()
    {
        // Benchmarks run from the build output directory; walk back up so game
        // resources resolve (presumably the repo root — matches other benchmarks).
        ProgramShared.PathOffset = "../../../../";
        PoolManager.Startup();
        _pair = await PoolManager.GetServerClient();
        var server = _pair.Server;

        // Spawn a test map and cache everything the benchmark loop needs.
        var mapData = await _pair.CreateTestMap();
        _coords = mapData.GridCoords;
        _spawnSys = server.System<StationSpawningSystem>();
        _gear = server.ProtoMan.Index<StartingGearPrototype>("CaptainGear");
    }

    [GlobalCleanup]
    public async Task Cleanup()
    {
        // Tear down the server/client pair and the pool so the process exits cleanly.
        await _pair.DisposeAsync();
        PoolManager.Shutdown();
    }

    [Benchmark]
    public async Task SpawnDeletePlayer()
    {
        await _pair.Server.WaitPost(() =>
        {
            var server = _pair.Server;
            for (var i = 0; i < N; i++)
            {
                // Spawn, fully equip, and immediately delete a mob.
                _entity = server.EntMan.SpawnAttachedTo(Mob, _coords);
                _spawnSys.EquipStartingGear(_entity, _gear);
                server.EntMan.DeleteEntity(_entity);
            }
        });
    }
}
|
||||
72
Content.Benchmarks/StereoToMonoBenchmark.cs
Normal file
72
Content.Benchmarks/StereoToMonoBenchmark.cs
Normal file
@ -0,0 +1,72 @@
|
||||
using System.Runtime.Intrinsics.X86;
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using Robust.Shared.Analyzers;
|
||||
|
||||
namespace Content.Benchmarks
|
||||
{
|
||||
[Virtual]
public class StereoToMonoBenchmark
{
    // Number of mono output samples produced per run.
    [Params(128, 256, 512)]
    public int N { get; set; }

    // _input holds 2*N samples; each output sample j averages _input[j] and _input[j + N].
    private short[] _input;
    private short[] _output;

    [GlobalSetup]
    public void Setup()
    {
        _input = new short[N * 2];
        _output = new short[N];
    }

    [Benchmark]
    public void BenchSimple()
    {
        // Scalar reference: average each pair with truncating integer division.
        var l = N;
        for (var j = 0; j < l; j++)
        {
            var k = j + l;
            _output[j] = (short) ((_input[k] + _input[j]) / 2);
        }
    }

    [Benchmark]
    public unsafe void BenchSse()
    {
        // 8 samples per iteration. NOTE(review): each operand is arithmetic-shifted
        // right by 1 before adding, which rounds each half independently — results
        // can differ from BenchSimple by 1 for odd sample values. Also assumes N is
        // a multiple of 8 (true for the Params above) — confirm before reusing.
        var l = N;
        fixed (short* iPtr = _input)
        fixed (short* oPtr = _output)
        {
            for (var j = 0; j < l; j += 8)
            {
                var k = j + l;

                var jV = Sse2.ShiftRightArithmetic(Sse2.LoadVector128(iPtr + j), 1);
                var kV = Sse2.ShiftRightArithmetic(Sse2.LoadVector128(iPtr + k), 1);

                Sse2.Store(j + oPtr, Sse2.Add(jV, kV));
            }
        }
    }

    [Benchmark]
    public unsafe void BenchAvx2()
    {
        // 16 samples per iteration; same rounding caveat as BenchSse, and assumes
        // N is a multiple of 16.
        var l = N;
        fixed (short* iPtr = _input)
        fixed (short* oPtr = _output)
        {
            for (var j = 0; j < l; j += 16)
            {
                var k = j + l;

                var jV = Avx2.ShiftRightArithmetic(Avx.LoadVector256(iPtr + j), 1);
                var kV = Avx2.ShiftRightArithmetic(Avx.LoadVector256(iPtr + k), 1);

                Avx.Store(j + oPtr, Avx2.Add(jV, kV));
            }
        }
    }
}
|
||||
}
|
||||
94
Content.Client/Access/AccessOverlay.cs
Normal file
94
Content.Client/Access/AccessOverlay.cs
Normal file
@ -0,0 +1,94 @@
|
||||
using System.Text;
|
||||
using Content.Client.Resources;
|
||||
using Content.Shared.Access.Components;
|
||||
using Robust.Client.Graphics;
|
||||
using Robust.Client.ResourceManagement;
|
||||
using Robust.Shared.Enums;
|
||||
|
||||
namespace Content.Client.Access;
|
||||
|
||||
/// <summary>
/// Debug overlay that draws a text summary of every <see cref="AccessReaderComponent"/>
/// (access sets, keys, deny tags) next to its entity on screen.
/// </summary>
public sealed class AccessOverlay : Overlay
{
    private const string TextFontPath = "/Fonts/NotoSans/NotoSans-Regular.ttf";
    private const int TextFontSize = 12;

    private readonly IEntityManager _entityManager;
    private readonly SharedTransformSystem _transformSystem;
    private readonly Font _font;

    // Text is drawn in screen space rather than world space.
    public override OverlaySpace Space => OverlaySpace.ScreenSpace;

    public AccessOverlay(IEntityManager entityManager, IResourceCache resourceCache, SharedTransformSystem transformSystem)
    {
        _entityManager = entityManager;
        _transformSystem = transformSystem;
        _font = resourceCache.GetFont(TextFontPath, TextFontSize);
    }

    protected override void Draw(in OverlayDrawArgs args)
    {
        if (args.ViewportControl == null)
            return;

        // One StringBuilder reused (cleared) for every access reader this frame.
        var textBuffer = new StringBuilder();
        var query = _entityManager.EntityQueryEnumerator<AccessReaderComponent, TransformComponent>();
        while (query.MoveNext(out var uid, out var accessReader, out var transform))
        {
            textBuffer.Clear();

            // Header: prototype name plus entity/network ids.
            var entityName = _entityManager.ToPrettyString(uid);
            textBuffer.AppendLine(entityName.Prototype);
            textBuffer.Append("UID: ");
            textBuffer.Append(entityName.Uid.Id);
            textBuffer.Append(", NUID: ");
            textBuffer.Append(entityName.Nuid.Id);
            textBuffer.AppendLine();

            if (!accessReader.Enabled)
            {
                // Disabled readers get a marker and no further detail.
                textBuffer.AppendLine("-Disabled");
                continue;
            }

            if (accessReader.AccessLists.Count > 0)
            {
                // Each access list is a numbered "set" of required access entries.
                var groupNumber = 0;
                foreach (var accessList in accessReader.AccessLists)
                {
                    groupNumber++;
                    foreach (var entry in accessList)
                    {
                        textBuffer.Append("+Set ");
                        textBuffer.Append(groupNumber);
                        textBuffer.Append(": ");
                        textBuffer.Append(entry.Id);
                        textBuffer.AppendLine();
                    }
                }
            }
            else
            {
                textBuffer.AppendLine("+Unrestricted");
            }

            foreach (var key in accessReader.AccessKeys)
            {
                textBuffer.Append("+Key ");
                textBuffer.Append(key.OriginStation);
                textBuffer.Append(": ");
                textBuffer.Append(key.Id);
                textBuffer.AppendLine();
            }

            foreach (var tag in accessReader.DenyTags)
            {
                textBuffer.Append("-Tag ");
                textBuffer.AppendLine(tag.Id);
            }

            // Project the entity's world position into the viewport and draw the text there.
            var accessInfoText = textBuffer.ToString();
            var screenPos = args.ViewportControl.WorldToScreen(_transformSystem.GetWorldPosition(transform));
            args.ScreenHandle.DrawString(_font, screenPos, accessInfoText, Color.Gold);
        }
    }
}
|
||||
}
|
||||
11
Content.Client/Access/AccessOverriderSystem.cs
Normal file
11
Content.Client/Access/AccessOverriderSystem.cs
Normal file
@ -0,0 +1,11 @@
|
||||
using Content.Shared.Access.Systems;
|
||||
using JetBrains.Annotations;
|
||||
|
||||
namespace Content.Client.Access
|
||||
{
|
||||
[UsedImplicitly]
|
||||
public sealed class AccessOverriderSystem : SharedAccessOverriderSystem
|
||||
{
|
||||
|
||||
}
|
||||
}
|
||||
7
Content.Client/Access/AccessSystem.cs
Normal file
7
Content.Client/Access/AccessSystem.cs
Normal file
@ -0,0 +1,7 @@
|
||||
using Content.Shared.Access.Systems;
|
||||
|
||||
namespace Content.Client.Access;
|
||||
|
||||
public sealed class AccessSystem : SharedAccessSystem
|
||||
{
|
||||
}
|
||||
42
Content.Client/Access/Commands/ShowAccessReadersCommand.cs
Normal file
42
Content.Client/Access/Commands/ShowAccessReadersCommand.cs
Normal file
@ -0,0 +1,42 @@
|
||||
using Robust.Client.Graphics;
|
||||
using Robust.Client.ResourceManagement;
|
||||
using Robust.Shared.Console;
|
||||
|
||||
namespace Content.Client.Access.Commands;
|
||||
|
||||
public sealed class ShowAccessReadersCommand : IConsoleCommand
|
||||
{
|
||||
public string Command => "showaccessreaders";
|
||||
|
||||
public string Description => "Toggles showing access reader permissions on the map";
|
||||
public string Help => """
|
||||
Overlay Info:
|
||||
-Disabled | The access reader is disabled
|
||||
+Unrestricted | The access reader has no restrictions
|
||||
+Set [Index]: [Tag Name]| A tag in an access set (accessor needs all tags in the set to be allowed by the set)
|
||||
+Key [StationUid]: [StationRecordKeyId] | A StationRecordKey that is allowed
|
||||
-Tag [Tag Name] | A tag that is not allowed (takes priority over other allows)
|
||||
""";
|
||||
public void Execute(IConsoleShell shell, string argStr, string[] args)
|
||||
{
|
||||
var collection = IoCManager.Instance;
|
||||
|
||||
if (collection == null)
|
||||
return;
|
||||
|
||||
var overlay = collection.Resolve<IOverlayManager>();
|
||||
|
||||
if (overlay.RemoveOverlay<AccessOverlay>())
|
||||
{
|
||||
shell.WriteLine($"Set access reader debug overlay to false");
|
||||
return;
|
||||
}
|
||||
|
||||
var entManager = collection.Resolve<IEntityManager>();
|
||||
var cache = collection.Resolve<IResourceCache>();
|
||||
var xform = entManager.System<SharedTransformSystem>();
|
||||
|
||||
overlay.AddOverlay(new AccessOverlay(entManager, cache, xform));
|
||||
shell.WriteLine($"Set access reader debug overlay to true");
|
||||
}
|
||||
}
|
||||
13
Content.Client/Access/IdCardConsoleSystem.cs
Normal file
13
Content.Client/Access/IdCardConsoleSystem.cs
Normal file
@ -0,0 +1,13 @@
|
||||
using Content.Shared.Access.Systems;
|
||||
using JetBrains.Annotations;
|
||||
|
||||
namespace Content.Client.Access
|
||||
{
|
||||
[UsedImplicitly]
|
||||
public sealed class IdCardConsoleSystem : SharedIdCardConsoleSystem
|
||||
{
|
||||
// one day, maybe bound user interfaces can be shared too.
|
||||
// then this doesn't have to be like this.
|
||||
// I hate this.
|
||||
}
|
||||
}
|
||||
5
Content.Client/Access/IdCardSystem.cs
Normal file
5
Content.Client/Access/IdCardSystem.cs
Normal file
@ -0,0 +1,5 @@
|
||||
using Content.Shared.Access.Systems;
|
||||
|
||||
namespace Content.Client.Access;
|
||||
|
||||
public sealed class IdCardSystem : SharedIdCardSystem;
|
||||
4
Content.Client/Access/UI/AccessLevelControl.xaml
Normal file
4
Content.Client/Access/UI/AccessLevelControl.xaml
Normal file
@ -0,0 +1,4 @@
|
||||
<GridContainer xmlns="https://spacestation14.io"
|
||||
Columns="5"
|
||||
HorizontalAlignment="Center">
|
||||
</GridContainer>
|
||||
59
Content.Client/Access/UI/AccessLevelControl.xaml.cs
Normal file
59
Content.Client/Access/UI/AccessLevelControl.xaml.cs
Normal file
@ -0,0 +1,59 @@
|
||||
using System.Linq;
|
||||
using Robust.Client.AutoGenerated;
|
||||
using Robust.Client.UserInterface;
|
||||
using Robust.Client.UserInterface.Controls;
|
||||
using Robust.Client.UserInterface.XAML;
|
||||
using Robust.Shared.Prototypes;
|
||||
using Content.Shared.Access;
|
||||
using Content.Shared.Access.Systems;
|
||||
|
||||
namespace Content.Client.Access.UI;
|
||||
|
||||
[GenerateTypedNameReferences]
|
||||
public sealed partial class AccessLevelControl : GridContainer
|
||||
{
|
||||
[Dependency] private readonly ILogManager _logManager = default!;
|
||||
|
||||
private ISawmill _sawmill = default!;
|
||||
|
||||
public readonly Dictionary<ProtoId<AccessLevelPrototype>, Button> ButtonsList = new();
|
||||
|
||||
public AccessLevelControl()
|
||||
{
|
||||
RobustXamlLoader.Load(this);
|
||||
IoCManager.InjectDependencies(this);
|
||||
|
||||
_sawmill = _logManager.GetSawmill("accesslevelcontrol");
|
||||
}
|
||||
|
||||
public void Populate(List<ProtoId<AccessLevelPrototype>> accessLevels, IPrototypeManager prototypeManager)
|
||||
{
|
||||
foreach (var access in accessLevels)
|
||||
{
|
||||
if (!prototypeManager.TryIndex(access, out var accessLevel))
|
||||
{
|
||||
_sawmill.Error($"Unable to find accesslevel for {access}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var newButton = new Button
|
||||
{
|
||||
Text = accessLevel.GetAccessLevelName(),
|
||||
ToggleMode = true,
|
||||
};
|
||||
AddChild(newButton);
|
||||
ButtonsList.Add(accessLevel.ID, newButton);
|
||||
}
|
||||
}
|
||||
|
||||
public void UpdateState(
|
||||
List<ProtoId<AccessLevelPrototype>> pressedList,
|
||||
List<ProtoId<AccessLevelPrototype>>? enabledList = null)
|
||||
{
|
||||
foreach (var (accessName, button) in ButtonsList)
|
||||
{
|
||||
button.Pressed = pressedList.Contains(accessName);
|
||||
button.Disabled = !(enabledList?.Contains(accessName) ?? true);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,77 @@
|
||||
using Content.Shared.Access;
|
||||
using Content.Shared.Access.Components;
|
||||
using Content.Shared.Access.Systems;
|
||||
using Content.Shared.Containers.ItemSlots;
|
||||
using Robust.Client.UserInterface;
|
||||
using Robust.Shared.Prototypes;
|
||||
using static Content.Shared.Access.Components.AccessOverriderComponent;
|
||||
|
||||
namespace Content.Client.Access.UI
|
||||
{
|
||||
public sealed class AccessOverriderBoundUserInterface : BoundUserInterface
|
||||
{
|
||||
[Dependency] private readonly IPrototypeManager _prototypeManager = default!;
|
||||
private readonly SharedAccessOverriderSystem _accessOverriderSystem = default!;
|
||||
|
||||
private AccessOverriderWindow? _window;
|
||||
|
||||
public AccessOverriderBoundUserInterface(EntityUid owner, Enum uiKey) : base(owner, uiKey)
|
||||
{
|
||||
_accessOverriderSystem = EntMan.System<SharedAccessOverriderSystem>();
|
||||
}
|
||||
|
||||
protected override void Open()
|
||||
{
|
||||
base.Open();
|
||||
|
||||
_window = this.CreateWindow<AccessOverriderWindow>();
|
||||
RefreshAccess();
|
||||
_window.Title = EntMan.GetComponent<MetaDataComponent>(Owner).EntityName;
|
||||
_window.OnSubmit += SubmitData;
|
||||
|
||||
_window.PrivilegedIdButton.OnPressed += _ => SendMessage(new ItemSlotButtonPressedEvent(PrivilegedIdCardSlotId));
|
||||
}
|
||||
|
||||
public override void OnProtoReload(PrototypesReloadedEventArgs args)
|
||||
{
|
||||
base.OnProtoReload(args);
|
||||
if (!args.WasModified<AccessLevelPrototype>())
|
||||
return;
|
||||
|
||||
RefreshAccess();
|
||||
|
||||
if (State != null)
|
||||
_window?.UpdateState(_prototypeManager, (AccessOverriderBoundUserInterfaceState) State);
|
||||
}
|
||||
|
||||
private void RefreshAccess()
|
||||
{
|
||||
List<ProtoId<AccessLevelPrototype>> accessLevels;
|
||||
|
||||
if (EntMan.TryGetComponent<AccessOverriderComponent>(Owner, out var accessOverrider))
|
||||
{
|
||||
accessLevels = accessOverrider.AccessLevels;
|
||||
// accessLevels.Sort(); // Frontier: use order specified in YAML
|
||||
}
|
||||
else
|
||||
{
|
||||
accessLevels = new List<ProtoId<AccessLevelPrototype>>();
|
||||
_accessOverriderSystem.Log.Error($"No AccessOverrider component found for {EntMan.ToPrettyString(Owner)}!");
|
||||
}
|
||||
|
||||
_window?.SetAccessLevels(_prototypeManager, accessLevels);
|
||||
}
|
||||
|
||||
protected override void UpdateState(BoundUserInterfaceState state)
|
||||
{
|
||||
base.UpdateState(state);
|
||||
var castState = (AccessOverriderBoundUserInterfaceState) state;
|
||||
_window?.UpdateState(_prototypeManager, castState);
|
||||
}
|
||||
|
||||
public void SubmitData(List<ProtoId<AccessLevelPrototype>> newAccessList)
|
||||
{
|
||||
SendMessage(new WriteToTargetAccessReaderIdMessage(newAccessList));
|
||||
}
|
||||
}
|
||||
}
|
||||
23
Content.Client/Access/UI/AccessOverriderWindow.xaml
Normal file
23
Content.Client/Access/UI/AccessOverriderWindow.xaml
Normal file
@ -0,0 +1,23 @@
|
||||
<DefaultWindow xmlns="https://spacestation14.io"
|
||||
MinSize="650 290">
|
||||
<BoxContainer Orientation="Vertical">
|
||||
<GridContainer Columns="2">
|
||||
<GridContainer Columns="3" HorizontalExpand="True">
|
||||
<Label Text="{Loc 'access-overrider-window-privileged-id'}" />
|
||||
<Button Name="PrivilegedIdButton" Access="Public"/>
|
||||
<Label Name="PrivilegedIdLabel" />
|
||||
</GridContainer>
|
||||
</GridContainer>
|
||||
<Label Name="TargetNameLabel" />
|
||||
<Control MinSize="0 8"/>
|
||||
<GridContainer Name="AccessLevelGrid" Columns="5" HorizontalAlignment="Center">
|
||||
|
||||
<!-- Access level buttons are added here by the C# code -->
|
||||
|
||||
</GridContainer>
|
||||
<Control MinSize="0 8"/>
|
||||
<Label Name="MissingPrivilegesLabel" />
|
||||
<Control MinSize="0 4"/>
|
||||
<Label Name="MissingPrivilegesText" />
|
||||
</BoxContainer>
|
||||
</DefaultWindow>
|
||||
98
Content.Client/Access/UI/AccessOverriderWindow.xaml.cs
Normal file
98
Content.Client/Access/UI/AccessOverriderWindow.xaml.cs
Normal file
@ -0,0 +1,98 @@
|
||||
using System.Linq;
|
||||
using Content.Shared.Access;
|
||||
using Content.Shared.Access.Systems;
|
||||
using Robust.Client.AutoGenerated;
|
||||
using Robust.Client.UserInterface.Controls;
|
||||
using Robust.Client.UserInterface.CustomControls;
|
||||
using Robust.Client.UserInterface.XAML;
|
||||
using Robust.Shared.Prototypes;
|
||||
using static Content.Shared.Access.Components.AccessOverriderComponent;
|
||||
|
||||
namespace Content.Client.Access.UI
|
||||
{
|
||||
[GenerateTypedNameReferences]
|
||||
public sealed partial class AccessOverriderWindow : DefaultWindow
|
||||
{
|
||||
private readonly Dictionary<string, Button> _accessButtons = new();
|
||||
|
||||
public event Action<List<ProtoId<AccessLevelPrototype>>>? OnSubmit;
|
||||
|
||||
public AccessOverriderWindow()
|
||||
{
|
||||
RobustXamlLoader.Load(this);
|
||||
}
|
||||
|
||||
public void SetAccessLevels(IPrototypeManager protoManager, List<ProtoId<AccessLevelPrototype>> accessLevels)
|
||||
{
|
||||
_accessButtons.Clear();
|
||||
AccessLevelGrid.DisposeAllChildren();
|
||||
|
||||
foreach (var access in accessLevels)
|
||||
{
|
||||
if (!protoManager.TryIndex(access, out var accessLevel))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var newButton = new Button
|
||||
{
|
||||
Text = accessLevel.GetAccessLevelName(),
|
||||
ToggleMode = true,
|
||||
};
|
||||
|
||||
AccessLevelGrid.AddChild(newButton);
|
||||
_accessButtons.Add(accessLevel.ID, newButton);
|
||||
newButton.OnPressed += _ =>
|
||||
{
|
||||
OnSubmit?.Invoke(
|
||||
// Iterate over the buttons dictionary, filter by `Pressed`, only get key from the key/value pair
|
||||
_accessButtons.Where(x => x.Value.Pressed).Select(x => new ProtoId<AccessLevelPrototype>(x.Key)).ToList());
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public void UpdateState(IPrototypeManager protoManager, AccessOverriderBoundUserInterfaceState state)
|
||||
{
|
||||
PrivilegedIdLabel.Text = state.PrivilegedIdName;
|
||||
PrivilegedIdButton.Text = state.IsPrivilegedIdPresent
|
||||
? Loc.GetString("access-overrider-window-eject-button")
|
||||
: Loc.GetString("access-overrider-window-insert-button");
|
||||
|
||||
TargetNameLabel.Text = state.TargetLabel;
|
||||
TargetNameLabel.FontColorOverride = state.TargetLabelColor;
|
||||
|
||||
MissingPrivilegesLabel.Text = "";
|
||||
MissingPrivilegesLabel.FontColorOverride = Color.Yellow;
|
||||
|
||||
MissingPrivilegesText.Text = "";
|
||||
MissingPrivilegesText.FontColorOverride = Color.Yellow;
|
||||
|
||||
if (state.MissingPrivilegesList != null && state.MissingPrivilegesList.Any())
|
||||
{
|
||||
var missingPrivileges = new List<string>();
|
||||
|
||||
foreach (string tag in state.MissingPrivilegesList)
|
||||
{
|
||||
var privilege = Loc.GetString(protoManager.Index<AccessLevelPrototype>(tag)?.Name ?? "generic-unknown");
|
||||
missingPrivileges.Add(privilege);
|
||||
}
|
||||
|
||||
MissingPrivilegesLabel.Text = Loc.GetString("access-overrider-window-missing-privileges");
|
||||
MissingPrivilegesText.Text = string.Join(", ", missingPrivileges);
|
||||
}
|
||||
|
||||
var interfaceEnabled = state.IsPrivilegedIdPresent && state.IsPrivilegedIdAuthorized;
|
||||
|
||||
foreach (var (accessName, button) in _accessButtons)
|
||||
{
|
||||
button.Disabled = !interfaceEnabled;
|
||||
if (interfaceEnabled)
|
||||
{
|
||||
// Explicit cast because Rider gives a false error otherwise.
|
||||
button.Pressed = state.TargetAccessReaderIdAccessList?.Contains((ProtoId<AccessLevelPrototype>) accessName) ?? false;
|
||||
button.Disabled = (!state.AllowedModifyAccessList?.Contains((ProtoId<AccessLevelPrototype>) accessName)) ?? true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
61
Content.Client/Access/UI/AgentIDCardBoundUserInterface.cs
Normal file
61
Content.Client/Access/UI/AgentIDCardBoundUserInterface.cs
Normal file
@ -0,0 +1,61 @@
|
||||
using Content.Shared.Access.Systems;
|
||||
using Content.Shared.StatusIcon;
|
||||
using Robust.Client.GameObjects;
|
||||
using Robust.Client.UserInterface;
|
||||
using Robust.Shared.Prototypes;
|
||||
|
||||
namespace Content.Client.Access.UI
|
||||
{
|
||||
/// <summary>
|
||||
/// Initializes a <see cref="AgentIDCardWindow"/> and updates it when new server messages are received.
|
||||
/// </summary>
|
||||
public sealed class AgentIDCardBoundUserInterface : BoundUserInterface
|
||||
{
|
||||
private AgentIDCardWindow? _window;
|
||||
|
||||
public AgentIDCardBoundUserInterface(EntityUid owner, Enum uiKey) : base(owner, uiKey)
|
||||
{
|
||||
}
|
||||
|
||||
protected override void Open()
|
||||
{
|
||||
base.Open();
|
||||
|
||||
_window = this.CreateWindow<AgentIDCardWindow>();
|
||||
|
||||
_window.OnNameChanged += OnNameChanged;
|
||||
_window.OnJobChanged += OnJobChanged;
|
||||
_window.OnJobIconChanged += OnJobIconChanged;
|
||||
}
|
||||
|
||||
private void OnNameChanged(string newName)
|
||||
{
|
||||
SendMessage(new AgentIDCardNameChangedMessage(newName));
|
||||
}
|
||||
|
||||
private void OnJobChanged(string newJob)
|
||||
{
|
||||
SendMessage(new AgentIDCardJobChangedMessage(newJob));
|
||||
}
|
||||
|
||||
public void OnJobIconChanged(ProtoId<JobIconPrototype> newJobIconId)
|
||||
{
|
||||
SendMessage(new AgentIDCardJobIconChangedMessage(newJobIconId));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Update the UI state based on server-sent info
|
||||
/// </summary>
|
||||
/// <param name="state"></param>
|
||||
protected override void UpdateState(BoundUserInterfaceState state)
|
||||
{
|
||||
base.UpdateState(state);
|
||||
if (_window == null || state is not AgentIDCardBoundUserInterfaceState cast)
|
||||
return;
|
||||
|
||||
_window.SetCurrentName(cast.CurrentName);
|
||||
_window.SetCurrentJob(cast.CurrentJob);
|
||||
_window.SetAllowedIcons(cast.CurrentJobIconId);
|
||||
}
|
||||
}
|
||||
}
|
||||
14
Content.Client/Access/UI/AgentIDCardWindow.xaml
Normal file
14
Content.Client/Access/UI/AgentIDCardWindow.xaml
Normal file
@ -0,0 +1,14 @@
|
||||
<DefaultWindow xmlns="https://spacestation14.io"
|
||||
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
|
||||
Title="{Loc agent-id-menu-title}">
|
||||
<BoxContainer Orientation="Vertical" SeparationOverride="4" MinWidth="150">
|
||||
<Label Name="CurrentName" Text="{Loc 'agent-id-card-current-name'}" />
|
||||
<LineEdit Name="NameLineEdit" />
|
||||
<Label Name="CurrentJob" Text="{Loc 'agent-id-card-current-job'}" />
|
||||
<LineEdit Name="JobLineEdit" />
|
||||
<Label Text="{Loc 'agent-id-card-job-icon-label'}"/>
|
||||
<GridContainer Name="IconGrid" Columns="10">
|
||||
<!-- Job icon buttons are generated in the code -->
|
||||
</GridContainer>
|
||||
</BoxContainer>
|
||||
</DefaultWindow>
|
||||
96
Content.Client/Access/UI/AgentIDCardWindow.xaml.cs
Normal file
96
Content.Client/Access/UI/AgentIDCardWindow.xaml.cs
Normal file
@ -0,0 +1,96 @@
|
||||
using Content.Client.Stylesheets;
|
||||
using Content.Shared.StatusIcon;
|
||||
using Robust.Client.AutoGenerated;
|
||||
using Robust.Client.GameObjects;
|
||||
using Robust.Client.UserInterface;
|
||||
using Robust.Client.UserInterface.Controls;
|
||||
using Robust.Client.UserInterface.CustomControls;
|
||||
using Robust.Client.UserInterface.XAML;
|
||||
using Robust.Shared.Prototypes;
|
||||
using System.Numerics;
|
||||
using System.Linq;
|
||||
|
||||
namespace Content.Client.Access.UI
|
||||
{
|
||||
[GenerateTypedNameReferences]
|
||||
public sealed partial class AgentIDCardWindow : DefaultWindow
|
||||
{
|
||||
[Dependency] private readonly IPrototypeManager _prototypeManager = default!;
|
||||
[Dependency] private readonly IEntitySystemManager _entitySystem = default!;
|
||||
private readonly SpriteSystem _spriteSystem;
|
||||
|
||||
private const int JobIconColumnCount = 10;
|
||||
|
||||
public event Action<string>? OnNameChanged;
|
||||
public event Action<string>? OnJobChanged;
|
||||
|
||||
public event Action<ProtoId<JobIconPrototype>>? OnJobIconChanged;
|
||||
|
||||
public AgentIDCardWindow()
|
||||
{
|
||||
RobustXamlLoader.Load(this);
|
||||
IoCManager.InjectDependencies(this);
|
||||
_spriteSystem = _entitySystem.GetEntitySystem<SpriteSystem>();
|
||||
|
||||
NameLineEdit.OnTextEntered += e => OnNameChanged?.Invoke(e.Text);
|
||||
NameLineEdit.OnFocusExit += e => OnNameChanged?.Invoke(e.Text);
|
||||
|
||||
JobLineEdit.OnTextEntered += e => OnJobChanged?.Invoke(e.Text);
|
||||
JobLineEdit.OnFocusExit += e => OnJobChanged?.Invoke(e.Text);
|
||||
}
|
||||
|
||||
public void SetAllowedIcons(string currentJobIconId)
|
||||
{
|
||||
IconGrid.DisposeAllChildren();
|
||||
|
||||
var jobIconButtonGroup = new ButtonGroup();
|
||||
var i = 0;
|
||||
var icons = _prototypeManager.EnumeratePrototypes<JobIconPrototype>().Where(icon => icon.AllowSelection).ToList();
|
||||
icons.Sort((x, y) => string.Compare(x.LocalizedJobName, y.LocalizedJobName, StringComparison.CurrentCulture));
|
||||
foreach (var jobIcon in icons)
|
||||
{
|
||||
String styleBase = StyleBase.ButtonOpenBoth;
|
||||
var modulo = i % JobIconColumnCount;
|
||||
if (modulo == 0)
|
||||
styleBase = StyleBase.ButtonOpenRight;
|
||||
else if (modulo == JobIconColumnCount - 1)
|
||||
styleBase = StyleBase.ButtonOpenLeft;
|
||||
|
||||
// Generate buttons
|
||||
var jobIconButton = new Button
|
||||
{
|
||||
Access = AccessLevel.Public,
|
||||
StyleClasses = { styleBase },
|
||||
MaxSize = new Vector2(42, 28),
|
||||
Group = jobIconButtonGroup,
|
||||
Pressed = currentJobIconId == jobIcon.ID,
|
||||
ToolTip = jobIcon.LocalizedJobName
|
||||
};
|
||||
|
||||
// Generate buttons textures
|
||||
var jobIconTexture = new TextureRect
|
||||
{
|
||||
Texture = _spriteSystem.Frame0(jobIcon.Icon),
|
||||
TextureScale = new Vector2(2.5f, 2.5f),
|
||||
Stretch = TextureRect.StretchMode.KeepCentered,
|
||||
};
|
||||
|
||||
jobIconButton.AddChild(jobIconTexture);
|
||||
jobIconButton.OnPressed += _ => OnJobIconChanged?.Invoke(jobIcon.ID);
|
||||
IconGrid.AddChild(jobIconButton);
|
||||
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
public void SetCurrentName(string name)
|
||||
{
|
||||
NameLineEdit.Text = name;
|
||||
}
|
||||
|
||||
public void SetCurrentJob(string job)
|
||||
{
|
||||
JobLineEdit.Text = job;
|
||||
}
|
||||
}
|
||||
}
|
||||
26
Content.Client/Access/UI/GroupedAccessLevelChecklist.xaml
Normal file
26
Content.Client/Access/UI/GroupedAccessLevelChecklist.xaml
Normal file
@ -0,0 +1,26 @@
|
||||
<BoxContainer xmlns="https://spacestation14.io"
|
||||
xmlns:gfx="clr-namespace:Robust.Client.Graphics;assembly=Robust.Client"
|
||||
Orientation="Horizontal"
|
||||
Margin="10 10 10 10"
|
||||
VerticalExpand="True"
|
||||
HorizontalExpand="True"
|
||||
MinHeight="70">
|
||||
|
||||
<!-- Access groups -->
|
||||
<BoxContainer Name="AccessGroupList" Access="Public" Orientation="Vertical" HorizontalExpand="True" SizeFlagsStretchRatio="0.5" Margin="0 0 10 0">
|
||||
<!-- Populated with C# code -->
|
||||
</BoxContainer>
|
||||
|
||||
<PanelContainer StyleClasses="LowDivider" VerticalExpand="True" Margin="0 0 0 0" SetWidth="2">
|
||||
<PanelContainer.PanelOverride>
|
||||
<gfx:StyleBoxFlat BackgroundColor="#FFFFFF" />
|
||||
</PanelContainer.PanelOverride>
|
||||
</PanelContainer>
|
||||
|
||||
<!-- Access levels -->
|
||||
<ScrollContainer HorizontalExpand="True" VerticalExpand="True" Margin="10 0 0 0">
|
||||
<BoxContainer Name="AccessLevelChecklist" Access="Public" Orientation="Vertical" HorizontalAlignment="Left">
|
||||
<!-- Populated with C# code -->
|
||||
</BoxContainer>
|
||||
</ScrollContainer>
|
||||
</BoxContainer>
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user