From 8736d3f4b7b644dfeb7cf7eb3167b6bf53e66d1a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9r=C3=A9mie=20GABOLDE?=
Date: Mon, 9 Dec 2024 00:57:09 +0100
Subject: [PATCH] Env stuff

---
 AssetSources/Meshes/STM_TunelRocks_01.fbx     |   3 +
 AssetSources/Meshes/STM_TunnelRocks.blend     |   3 +
 AssetSources/Meshes/STM_VolcanicRocks.blend   |   4 +-
 AssetSources/Meshes/STM_VolcanicRocks_01.fbx  |   4 +-
 AssetSources/Meshes/STM_VolcanicRocks_02.fbx  |   3 +
 ...oncave_object_to_convex_objects_by_face.py | 619 ++++++++++++++++++
 Config/DefaultEngine.ini                      |   1 -
 .../Geology/STM_TunelRocks_01.uasset          |   3 +
 .../Geology/STM_VolcanicRocks_01.uasset       |   4 +-
 .../Geology/STM_VolcanicRocks_02.uasset       |   4 +-
 Content/Maps/Gym/Gym_Environment.umap         |   4 +-
 Content/Maps/Gym/Gym_Nanite.umap              |   3 +
 .../bozarre/Rectangle_DF853BE6.uasset         |   3 +
 .../MasterMaterials/M_LD_Lava.uasset          |   4 +-
 .../MI_LD_Color_GreyDark.uasset               |   3 +
 .../MaterialInstances/MI_LD_Lava_Base.uasset  |   3 +
 Content/Meshes/Sphere.uasset                  |   3 +
 GrapplingGravity.uproject                     |   7 +
 18 files changed, 665 insertions(+), 13 deletions(-)
 create mode 100644 AssetSources/Meshes/STM_TunelRocks_01.fbx
 create mode 100644 AssetSources/Meshes/STM_TunnelRocks.blend
 create mode 100644 AssetSources/Meshes/STM_VolcanicRocks_02.fbx
 create mode 100644 AssetSources/Meshes/concave_object_to_convex_objects_by_face.py
 create mode 100644 Content/Environment/Geology/STM_TunelRocks_01.uasset
 create mode 100644 Content/Maps/Gym/Gym_Nanite.umap
 create mode 100644 Content/Maps/Gym/_GENERATED/bozarre/Rectangle_DF853BE6.uasset
 create mode 100644 Content/Materials/MaterialInstances/MI_LD_Color_GreyDark.uasset
 create mode 100644 Content/Materials/MaterialInstances/MI_LD_Lava_Base.uasset
 create mode 100644 Content/Meshes/Sphere.uasset

diff --git a/AssetSources/Meshes/STM_TunelRocks_01.fbx b/AssetSources/Meshes/STM_TunelRocks_01.fbx
new file mode 100644
index 0000000..43023b5
--- /dev/null
+++ b/AssetSources/Meshes/STM_TunelRocks_01.fbx
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c101da5e27ff9a08a3602decd39589e9b511e56b5cee9a8553601a3f3870785
+size 153772
diff --git a/AssetSources/Meshes/STM_TunnelRocks.blend b/AssetSources/Meshes/STM_TunnelRocks.blend
new file mode 100644
index 0000000..79a3410
--- /dev/null
+++ b/AssetSources/Meshes/STM_TunnelRocks.blend
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6bfaa2579d12c5c5a705b160766588e0ddad5a348f21a455b64ed48e583b277
+size 1033305
diff --git a/AssetSources/Meshes/STM_VolcanicRocks.blend b/AssetSources/Meshes/STM_VolcanicRocks.blend
index f6c60a9..607755a 100644
--- a/AssetSources/Meshes/STM_VolcanicRocks.blend
+++ b/AssetSources/Meshes/STM_VolcanicRocks.blend
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:86a58455544e2bc24f568a4ae323b37f503e9f5b6ec68a236e869a4044644111
-size 1409739
+oid sha256:a509e398f38b845debea9bd3feffed5b7f419ece2faeeb9235214d02556e815f
+size 1492247
diff --git a/AssetSources/Meshes/STM_VolcanicRocks_01.fbx b/AssetSources/Meshes/STM_VolcanicRocks_01.fbx
index b31d7ee..0b1f530 100644
--- a/AssetSources/Meshes/STM_VolcanicRocks_01.fbx
+++ b/AssetSources/Meshes/STM_VolcanicRocks_01.fbx
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:86427898fa24e12ad7be48b2e7b0167f65afc7459f2430eddc69d12b231df7b5
-size 829964
+oid sha256:18dd1df9621f67f5d4ede1ff4794d1b3f65b64efe422e1c3b0692149807cfffb
+size 1023612
diff --git a/AssetSources/Meshes/STM_VolcanicRocks_02.fbx b/AssetSources/Meshes/STM_VolcanicRocks_02.fbx
new file mode 100644
index 0000000..1cdf9f6
--- /dev/null
+++ b/AssetSources/Meshes/STM_VolcanicRocks_02.fbx
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c08746860c92272592354583b60c3dbdd22076af48552743b4ff9aaced7890a4
+size 934780
diff --git a/AssetSources/Meshes/concave_object_to_convex_objects_by_face.py b/AssetSources/Meshes/concave_object_to_convex_objects_by_face.py
new file mode 100644
index 0000000..b308ed0
--- /dev/null
+++ b/AssetSources/Meshes/concave_object_to_convex_objects_by_face.py
@@ -0,0 +1,619 @@
+##
+# A script to split simple, architectural geometry into convex pieces.
+#
+# This script makes use of Blender's built-in "Split Concave Faces" clean-up
+# algorithm to break up the faces of an object into convex pieces. The script
+# attempts to identify all the edges that represent convex boundaries, and then
+# it splits objects up along those edges. Each resulting piece is then made into
+# a closed object by converting it into a convex hull.
+#
+# Be sure to select the object you wish to split into convex pieces before
+# running the script.
+#
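+# For example, with the target object selected and active in the saved .blend
+# file, the script can be run from Blender's Text Editor, or headless from the
+# command line (the paths below are illustrative -- adjust them to your
+# project):
+#
+#   blender --background AssetSources/Meshes/STM_TunnelRocks.blend \
+#       --python AssetSources/Meshes/concave_object_to_convex_objects_by_face.py
+#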
+# NOTE: This script expects to work with flat, reasonably clean geometry.
+# For example, it is expected to be used when generating collision on the
+# ceiling and walls of an architectural visualization project, but is not
+# expected to perform well with round or n-gon geometry. Using
+# create_closed_objects=True and matchup_degenerates=True, in particular, does
+# not work well with objects that have openings inside.
+#
+# If this script doesn't work for you, a plug-in like V-HACD may work better.
+# This script was written to handle cases V-HACD did not handle well -- flat,
+# reasonably rectangular arch. vis. geometry.
+#
+# @author Guy Elsmore-Paddock
+#
+
+import bmesh
+import bpy
+import operator
+import re
+
+from itertools import combinations, count
+from math import atan2, pi, radians, degrees
+from mathutils import Vector
+
+
+def split_into_convex_pieces(ob, create_closed_objects=True,
+                             matchup_degenerates=True):
+    object_name = ob.name
+
+    deselect_all_objects()
+    make_all_faces_convex(ob)
+
+    eliminated_piece_names = \
+        split_on_convex_boundaries(
+            ob,
+            create_closed_objects,
+            matchup_degenerates
+        )
+
+    rename_pieces(object_name, eliminated_piece_names)
+
+    # Deselect everything, for the convenience of the user.
+    deselect_all_objects()
+
+
+def make_all_faces_convex(ob):
+    bpy.context.view_layer.objects.active = ob
+    bpy.ops.object.mode_set(mode='EDIT')
+
+    # This is what actually defines the new geometry -- Blender creates the
+    # convex shapes we need to split by.
+    bpy.ops.mesh.select_all(action='SELECT')
+    bpy.ops.mesh.vert_connect_concave()
+    bpy.ops.mesh.select_all(action='DESELECT')
+
+
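+# NOTE: "vert_connect_concave" is the built-in "Split Concave Faces" clean-up
+# operator mentioned above. It only subdivides each concave face into convex
+# faces; the object as a whole can still be concave, which is what the
+# edge-splitting pass below takes care of.
+
+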
+##
+# Splits an object into smaller pieces by its convex, planar edges.
+#
+# In an ideal world, we could just split the object by all the edges that are
+# attached to -- and are co-planar with -- the faces of the object, since those
+# edges are most likely to represent the convex boundaries of the object. But,
+# Blender does not provide an easy way to find such edges.
+#
+# Instead, we use several heuristics to simulate this type of selection:
+# 1. First, we select all the sharp edges of the object, since sharp edges are
+#    only co-planar with one of the faces they connect with and are therefore
+#    unlikely to represent convex boundary edges.
+# 2. Second, we select all edges that are similar in length to the sharp edges,
+#    to catch any edges that are almost steep enough to be sharp edges.
+# 3. Third, we invert the selection, which should (hopefully) cause all the
+#    convex boundary edges we want to be selected.
+# 4. Fourth, we seek out any sharp edges that connect with the convex boundary
+#    edges, since we will need to split on these edges as well so that our
+#    "cuts" go all the way around the object (e.g. if the convex boundary
+#    edges lay on the top and bottom faces of an object, we need to select
+#    sharp edges to connect the top and bottom edges on the left and right
+#    sides so that each split piece is a complete shape instead of just
+#    disconnected, detached planes).
+# 5. Next, we split the object by all selected edges, which should result in
+#    the creation of each convex piece we seek.
+#
+def split_on_convex_boundaries(ob, create_closed_objects=True,
+                               matchup_degenerates=True):
+    bpy.ops.object.mode_set(mode='EDIT')
+
+    select_convex_boundary_edges(ob)
+
+    # Now perform a vertex + edge split along each selected edge, which should
+    # result in the object being broken up along each planar edge and connected
+    # sharp edges (e.g. on corners).
+    bpy.ops.mesh.edge_split(type='VERT')
+
+    # Now, just break each loose part off into a separate object.
+    bpy.ops.mesh.select_all(action='SELECT')
+    bpy.ops.mesh.separate(type='LOOSE')
+
+    if create_closed_objects:
+        # And then make each piece fully enclosed.
+        return create_closed_shapes_from_pieces(ob, matchup_degenerates)
+    else:
+        return []
+
+
+##
+# Selects all edges that denote the boundaries of convex pieces.
+#
+# This is a multi-step process driven by heuristics:
+# 1. First, we select all the sharp edges of the object, since sharp edges are
+#    only co-planar with one of the faces they connect with and are therefore
+#    unlikely to represent convex boundary edges.
+# 2. Second, we select all edges that are similar in length to the sharp
+#    edges, to catch any edges that are almost steep enough to be sharp edges.
+# 3. Third, we invert the selection, which should (hopefully) cause all the
+#    convex boundary edges we want to be selected.
+#
+def select_convex_boundary_edges(ob, max_edge_length_proportion=0.1):
+    bpy.ops.object.mode_set(mode='EDIT')
+
+    mesh = ob.data
+    bm = bmesh.from_edit_mesh(mesh)
+
+    # Enter "Edge" select mode
+    bpy.context.tool_settings.mesh_select_mode = [False, True, False]
+
+    # Find all sharp edges and edges of similar length
+    bpy.ops.mesh.select_all(action='DESELECT')
+    bpy.ops.mesh.edges_select_sharp()
+    bpy.ops.mesh.select_similar(type='LENGTH', threshold=0.01)
+
+    # Invert the selection to find the convex boundary edges.
+    bpy.ops.mesh.select_all(action='INVERT')
+
+    bm.faces.ensure_lookup_table()
+    bm.edges.ensure_lookup_table()
+
+    edges_to_select = []
+    max_edge_length = max(ob.dimensions) * max_edge_length_proportion
+
+    for selected_edge in [e for e in bm.edges if e.select]:
+        edge_bridges = \
+            find_shortest_edge_bridges(
+                selected_edge,
+                max_edge_length=max_edge_length
+            )
+
+        for path in edge_bridges.values():
+            for edge in path:
+                edges_to_select.append(edge)
+
+    # Select the edges after we pick which edges we *want* to select, to ensure
+    # that we only base our decisions on the initial convex boundary edges.
+    for edge in edges_to_select:
+        edge.select = True
+
+
+##
+# Locate the shortest path of edges to connect already-selected edges.
+#
+# This is used to find the additional edges that must be selected for a cut
+# along a convex boundary to create a complete, closed object shape.
+#
+# The max edge length argument can be provided to avoid trying to find
+# connections between convex boundaries that are very far apart in the same
+# object.
+#
+def find_shortest_edge_bridges(starting_edge, max_edge_length=None):
+    edge_bridges = find_bridge_edges(starting_edge, max_edge_length)
+    sorted_edge_bridges = sorted(edge_bridges, key=lambda eb: eb[0])
+    edge_solutions = {}
+
+    for edge_bridge in sorted_edge_bridges:
+        path_distance, final_edge, path = edge_bridge
+
+        # Skip edges we've already found a min-length path to
+        if final_edge not in edge_solutions.keys():
+            edge_solutions[final_edge] = path
+
+    print(f"Shortest edge bridges for starting edge '{starting_edge.index}':")
+
+    if len(edge_solutions) > 0:
+        print(
+            " - " +
+            "\n - ".join(map(
+                lambda i: str(
+                    (i[0].index, str(list(map(lambda e: e.index, i[1]))))
+                ),
+                edge_solutions.items()
+            )))
+    print("")
+    print("")
+
+    return edge_solutions
+
+
+##
+# Performs a recursive, depth-first search from a selected edge to other edges.
+#
+# This returns all possible paths -- and distances of those paths -- to traverse
+# the mesh from the starting, selected edge to another selected edge. To avoid
+# a lengthy search, the max_depth parameter controls how many levels of edges
+# are searched.
+#
+# The result is a list of tuples, where each tuple contains the total distance
+# of the path, the already-selected edge that the path was able to reach, and
+# the list of edges that would need to be selected in order to reach that
+# destination edge.
+#
+def find_bridge_edges(edge, max_edge_length=None, max_depth=3, current_depth=0,
+                      path_distance=0, edge_path=None, seen_verts=None):
+    if edge_path is None:
+        edge_path = []
+
+    if seen_verts is None:
+        seen_verts = []
+
+    # Don't bother searching edges we've seen
+    if edge in edge_path:
+        return []
+
+    if current_depth > 0:
+        first_edge = edge_path[0]
+        edge_length = edge.calc_length()
+
+        # Don't bother searching edges that share a face (and therefore a
+        # normal) with the first edge. We want our cuts to follow convex
+        # boundaries that are perpendicular to it.
+        if have_common_face(first_edge, edge):
+            return []
+
+        if edge.select:
+            return [(path_distance, edge, edge_path)]
+
+        # Disqualify edges that are too long.
+        if max_edge_length is not None and edge_length > max_edge_length:
+            print(
+                f"Disqualifying edge {edge.index} because length "
+                f"[{edge_length}] > [{max_edge_length}]"
+            )
+
+            return []
+
+    if current_depth == max_depth:
+        return []
+
+    new_edge_path = edge_path + [edge]
+    bridges = []
+
+    for edge_vert in edge.verts:
+        # Don't bother searching vertices we've already seen (no backtracking).
+        if edge_vert in seen_verts:
+            continue
+
+        new_seen_verts = seen_verts + [edge_vert]
+
+        for linked_edge in edge_vert.link_edges:
+            # Don't bother searching selected edges connected to the starting
+            # edge, since that gets us nowhere.
+            if linked_edge.select and current_depth == 0:
+                continue
+
+            edge_length = linked_edge.calc_length()
+
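+            # Recurse into the linked edge, extending the candidate path and
+            # accumulating the path distance walked so far.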
+            found_bridge_edges = find_bridge_edges(
+                edge=linked_edge,
+                max_edge_length=max_edge_length,
+                max_depth=max_depth,
+                current_depth=current_depth + 1,
+                path_distance=path_distance + edge_length,
+                edge_path=new_edge_path,
+                seen_verts=new_seen_verts
+            )
+
+            bridges.extend(found_bridge_edges)
+
+    return bridges
+
+
+def create_closed_shapes_from_pieces(ob, matchup_degenerates=True,
+                                     min_volume=0.1):
+    print("Converting each piece into a closed object...")
+
+    degenerate_piece_names = []
+
+    for piece in name_duplicates_of(ob):
+        if not make_piece_convex(piece):
+            degenerate_piece_names.append(piece.name)
+
+    degenerate_count = len(degenerate_piece_names)
+
+    print("")
+    print(f"Total degenerate (flat) pieces: {degenerate_count}")
+    print("")
+
+    eliminated_piece_names = []
+
+    if matchup_degenerates:
+        if degenerate_count > 10:
+            # TODO: Hopefully, some day, find a good deterministic way to
+            # automatically match up any number of degenerate pieces using a
+            # heuristic that generates sane geometry.
+            print(
+                "There are too many degenerates for reliable auto-matching, so "
+                "it will not be performed. You will need to manually combine "
+                "degenerate pieces.")
+            print("")
+        else:
+            eliminated_piece_names.extend(
+                matchup_degenerate_pieces(degenerate_piece_names, min_volume)
+            )
+
+    eliminated_piece_names.extend(
+        eliminate_tiny_pieces(degenerate_piece_names, min_volume)
+    )
+
+    return eliminated_piece_names
+
+
+def matchup_degenerate_pieces(degenerate_piece_names, min_volume=0.1):
+    pieces_eliminated = []
+    degenerate_volumes = find_degenerate_combos(degenerate_piece_names)
+
+    print("Searching for a way to match up degenerate pieces into volumes...")
+
+    for piece1_name, piece1_volumes in degenerate_volumes.items():
+        # Skip pieces already joined with degenerate pieces we've processed
+        if piece1_name not in degenerate_piece_names:
+            continue
+
+        piece1 = bpy.data.objects[piece1_name]
+
+        piece1_volumes_asc = dict(
+            sorted(
+                piece1_volumes.items(),
+                key=operator.itemgetter(1)
+            )
+        )
+
+        piece2 = None
+
+        for piece2_name, combo_volume in piece1_volumes_asc.items():
+            # Skip pieces that would make a volume that's too small, or that
+            # have been joined with degenerate pieces we've processed
+            if combo_volume < min_volume or piece2_name not in degenerate_piece_names:
+                continue
+            else:
+                piece2 = bpy.data.objects[piece2_name]
+                break
+
+        if piece2 is not None:
+            degenerate_piece_names.remove(piece2.name)
+            pieces_eliminated.append(piece2.name)
+
+            print(
+                f" - Combining parallel degenerate '{piece1.name}' with "
+                f"'{piece2.name}' to form complete mesh '{piece1.name}'."
+            )
+
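+            # Join the matched piece into piece1 (the active object) and
+            # rebuild its convex hull so the two parallel, flat pieces form
+            # one closed volume.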
+            bpy.ops.object.mode_set(mode='OBJECT')
+            bpy.ops.object.select_all(action='DESELECT')
+
+            bpy.context.view_layer.objects.active = piece1
+
+            piece1.select_set(True)
+            piece2.select_set(True)
+
+            bpy.ops.object.join()
+
+            make_piece_convex(piece1)
+
+    return pieces_eliminated
+
+
+def find_degenerate_combos(degenerate_piece_names):
+    volumes = {}
+
+    for piece_combo in combinations(degenerate_piece_names, 2):
+        piece1_name, piece2_name = piece_combo
+        piece1 = bpy.data.objects[piece1_name]
+        piece2 = bpy.data.objects[piece2_name]
+
+        if not volumes.get(piece1_name):
+            volumes[piece1_name] = {}
+
+        piece1_mesh = piece1.data
+        piece1_bm = bmesh.new()
+        piece1_bm.from_mesh(piece1_mesh)
+
+        piece2_mesh = piece2.data
+        piece2_bm = bmesh.new()
+        piece2_bm.from_mesh(piece2_mesh)
+
+        piece1_bm.faces.ensure_lookup_table()
+        piece2_bm.faces.ensure_lookup_table()
+
+        if (len(piece1_bm.faces) == 0) or (len(piece2_bm.faces) == 0):
+            continue
+
+        piece1_face = piece1_bm.faces[0]
+        piece2_face = piece2_bm.faces[0]
+
+        combo_angle_radians = piece1_face.normal.angle(piece2_face.normal)
+        combo_angle_degrees = int(round(degrees(combo_angle_radians)))
+
+        # We only combine faces that are parallel to each other
+        if combo_angle_degrees in [0, 180]:
+            combo_volume = convex_volume(piece1, piece2)
+
+            volumes[piece1.name][piece2.name] = combo_volume
+
+    return volumes
+
+
+def eliminate_tiny_pieces(degenerate_piece_names, min_volume=0.1):
+    eliminated_piece_names = []
+
+    tiny_piece_names = [
+        n for n in degenerate_piece_names
+        if n not in eliminated_piece_names
+        and convex_volume(bpy.data.objects.get(n)) < min_volume
+    ]
+
+    print("")
+    print(f"Total remaining tiny pieces: {len(tiny_piece_names)}")
+
+    # Delete tiny pieces that are too small to be useful
+    for tiny_piece_name in tiny_piece_names:
+        print(f" - Eliminating tiny piece '{tiny_piece_name}'...")
+
+        tiny_piece = bpy.data.objects[tiny_piece_name]
+
+        bpy.data.objects.remove(tiny_piece, do_unlink=True)
+        eliminated_piece_names.append(tiny_piece_name)
+
+    print("")
+
+    return eliminated_piece_names
+
+
+def make_piece_convex(ob, min_volume=0.1):
+    print(
+        f" - Attempting to make '{ob.name}' into a closed, convex "
+        f"shape."
+    )
+
+    volume_before = convex_volume(ob)
+
+    make_convex_hull(ob)
+
+    volume_after = convex_volume(ob)
+    volume_delta = abs(volume_after - volume_before)
+
+    # If the volume of the piece is very small when we tried making it convex,
+    # then it's degenerate -- it's a plane or something flat that we need to
+    # remove.
+    is_degenerate = (volume_after < min_volume)
+
+    print(f" - Volume before: {volume_before}")
+    print(f" - Volume after: {volume_after}")
+    print(f" - Volume delta: {volume_delta}")
+    print(f" - Is degenerate: {is_degenerate}")
+
+    return not is_degenerate
+
+
+def make_convex_hull(ob):
+    deselect_all_objects()
+
+    bpy.context.view_layer.objects.active = ob
+    ob.select_set(True)
+
+    bpy.ops.object.mode_set(mode='EDIT')
+
+    bpy.ops.mesh.select_all(action='SELECT')
+    bpy.ops.mesh.convex_hull()
+
+    mesh = ob.data
+    bm = bmesh.from_edit_mesh(mesh)
+
+    # Clean up unnecessary edges
+    bmesh.ops.dissolve_limit(
+        bm,
+        angle_limit=radians(5),
+        verts=bm.verts,
+        edges=bm.edges,
+    )
+
+    bpy.ops.object.mode_set(mode='OBJECT')
+    bpy.ops.object.select_all(action='DESELECT')
+
+
+def have_common_normal(e1, e2):
+    e1_normals = map(lambda f: f.normal, e1.link_faces)
+    e2_normals = map(lambda f: f.normal, e2.link_faces)
+
+    common_normals = [n for n in e1_normals if n in e2_normals]
+
+    return len(common_normals) > 0
+
+
+def have_common_face(e1, e2):
+    common_faces = [f for f in e1.link_faces if f in e2.link_faces]
+
+    return len(common_faces) > 0
+
+
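+##
+# Computes the volume of the convex hull around all vertices of the given
+# objects.
+#
+# NOTE: Vertex coordinates are read from each object's mesh data, i.e. in local
+# object space; this assumes the objects being combined share the same object
+# transform, as is the case for pieces produced by the "separate by loose
+# parts" step above.
+#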
+def convex_volume(*obs):
+    meshes = []
+    verts = []
+
+    for ob in obs:
+        mesh = ob.data
+        bm = bmesh.new()
+
+        bm.from_mesh(mesh)
+
+        bm.verts.ensure_lookup_table()
+        bm.edges.ensure_lookup_table()
+        bm.faces.ensure_lookup_table()
+
+        # Prevent early garbage collection.
+        meshes.append(bm)
+
+        geom = list(bm.verts) + list(bm.edges) + list(bm.faces)
+
+        for g in geom:
+            if hasattr(g, "verts"):
+                verts.extend(v.co for v in g.verts)
+            else:
+                verts.append(g.co)
+
+    return build_volume_from_verts(verts)
+
+
+def build_volume_from_verts(verts):
+    # Based on code from:
+    # https://blender.stackexchange.com/questions/107357/how-to-find-if-geometry-linked-to-an-edge-is-coplanar
+    origin = sum(verts, Vector((0, 0, 0))) / len(verts)
+    bm = bmesh.new()
+
+    for v in verts:
+        bm.verts.new(v - origin)
+
+    bmesh.ops.convex_hull(bm, input=bm.verts)
+
+    return bm.calc_volume()
+
+
+def deselect_all_objects():
+    try:
+        bpy.ops.object.mode_set(mode='OBJECT')
+        bpy.ops.object.select_all(action='DESELECT')
+    except:
+        pass
+
+
+def rename_pieces(object_name, name_skiplist=None):
+    if name_skiplist is None:
+        name_skiplist = []
+
+    for duplicate_name, old_index_str, new_index in dupe_name_sequence(object_name, name_skiplist):
+        piece = bpy.data.objects.get(duplicate_name)
+
+        if not piece:
+            break
+
+        old_name = piece.name
+        new_name = re.sub(fr"(?:01)?\.{old_index_str}$", f"{new_index:02d}", piece.name)
+
+        if old_name != new_name:
+            print(f"Renaming piece '{old_name}' to '{new_name}'.")
+            piece.name = new_name
+
+
+def name_duplicates_of(ob):
+    duplicates = []
+
+    for duplicate_name, _, _ in dupe_name_sequence(ob.name):
+        piece = bpy.data.objects.get(duplicate_name)
+
+        if not piece:
+            break
+        else:
+            duplicates.append(piece)
+
+    return duplicates
+
+
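+##
+# Yields (duplicate name, old index string, new index) tuples for Blender
+# ".001"-style duplicates of the given base name.
+#
+# Names in the skiplist (e.g. pieces that were eliminated) are skipped without
+# consuming a new index, so the surviving pieces end up numbered consecutively.
+# The sequence is unbounded; callers stop iterating once a duplicate name no
+# longer resolves to an object.
+#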
+def dupe_name_sequence(base_name, skiplist=None):
+    if skiplist is None:
+        skiplist = []
+
+    yield base_name, "", 1
+
+    new_index = 1
+
+    for old_name_index in count(start=1):
+        old_index_str = f"{old_name_index:03d}"
+        duplicate_name = f"{base_name}.{old_index_str}"
+
+        if duplicate_name in skiplist:
+            continue
+        else:
+            new_index = new_index + 1
+
+        yield duplicate_name, old_index_str, new_index
+
+
+split_into_convex_pieces(bpy.context.view_layer.objects.active)
+print("Done!")
diff --git a/Config/DefaultEngine.ini b/Config/DefaultEngine.ini
index cda3d42..f7c5284 100644
--- a/Config/DefaultEngine.ini
+++ b/Config/DefaultEngine.ini
@@ -212,7 +212,6 @@
 ReverbPlugin=
 OcclusionPlugin=
 SoundCueCookQualityIndex=-1
-TargetedRHIs=SF_VULKAN_SM5
-+TargetedRHIs=SF_VULKAN_SM5
+TargetedRHIs=SF_VULKAN_SM6
 
 [/Script/Engine.PhysicsSettings]
diff --git a/Content/Environment/Geology/STM_TunelRocks_01.uasset b/Content/Environment/Geology/STM_TunelRocks_01.uasset
new file mode 100644
index 0000000..ec35b03
--- /dev/null
+++ b/Content/Environment/Geology/STM_TunelRocks_01.uasset
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a52932df2cf0b294c7e3b873a297cbf88be594d8fa187703ccdbdbd0850afe6
+size 179934
diff --git a/Content/Environment/Geology/STM_VolcanicRocks_01.uasset b/Content/Environment/Geology/STM_VolcanicRocks_01.uasset
index 5dd957d..7230e1f 100644
--- a/Content/Environment/Geology/STM_VolcanicRocks_01.uasset
+++ b/Content/Environment/Geology/STM_VolcanicRocks_01.uasset
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:90d6d95273791a6ab690179529509c5f558f6f830a54c32027e43db5341dedd8
-size 659098
+oid sha256:dbf2f438c7d8562df7d8f6b82081d37d275530c05b8decb1037bf96808deb882
+size 1055557
diff --git a/Content/Environment/Geology/STM_VolcanicRocks_02.uasset b/Content/Environment/Geology/STM_VolcanicRocks_02.uasset
index cbfa279..670a4a5 100644
--- a/Content/Environment/Geology/STM_VolcanicRocks_02.uasset
+++ b/Content/Environment/Geology/STM_VolcanicRocks_02.uasset
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d05ec6888d89f359870dcc912e4a467d2f13cf325e00b351886d68e082c99818
-size 609782
+oid sha256:a00f7273591572ac8da34ddbaf2cf159686956f3ee7a2e512b56e0648a869009
+size 1002204
diff --git a/Content/Maps/Gym/Gym_Environment.umap b/Content/Maps/Gym/Gym_Environment.umap
index ec06053..1bae92e 100644
--- a/Content/Maps/Gym/Gym_Environment.umap
+++ b/Content/Maps/Gym/Gym_Environment.umap
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:165b403a82b9c49a7e1765518407231aaf46e816645b8c805221e8aced1a8514
-size 104163
+oid sha256:1a2543707993ca0828f982dfd2ea27a60b84f6627e21974a5dbea1acd059a8d7
+size 135517
diff --git a/Content/Maps/Gym/Gym_Nanite.umap b/Content/Maps/Gym/Gym_Nanite.umap
new file mode 100644
index 0000000..fc55f94
--- /dev/null
+++ b/Content/Maps/Gym/Gym_Nanite.umap
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45e11d2c450d87249714f136d84724d1fc99d792ec38e79dc6f36abd6beb58b7
+size 47875
diff --git a/Content/Maps/Gym/_GENERATED/bozarre/Rectangle_DF853BE6.uasset b/Content/Maps/Gym/_GENERATED/bozarre/Rectangle_DF853BE6.uasset
new file mode 100644
index 0000000..93d76a8
--- /dev/null
+++ b/Content/Maps/Gym/_GENERATED/bozarre/Rectangle_DF853BE6.uasset
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d606e440f57420544c673df4d3c67cb2fbd146b8979f08ae9d6156f09215302f
+size 16230
diff --git a/Content/Materials/MasterMaterials/M_LD_Lava.uasset b/Content/Materials/MasterMaterials/M_LD_Lava.uasset
index baceb82..6a2e20e 100644
--- a/Content/Materials/MasterMaterials/M_LD_Lava.uasset
+++ b/Content/Materials/MasterMaterials/M_LD_Lava.uasset
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:90b16b7dde7fb3df3693317dae4cee7a2f7bb4b81720a0ea000fedad58c7e2e1
-size 45061
+oid sha256:52fbf2831c308d56bc2ed46c1eac7f29612ce852c444a9f7c4acac16952112d9
+size 36084
diff --git a/Content/Materials/MaterialInstances/MI_LD_Color_GreyDark.uasset b/Content/Materials/MaterialInstances/MI_LD_Color_GreyDark.uasset
new file mode 100644
index 0000000..43af8a0
--- /dev/null
+++ b/Content/Materials/MaterialInstances/MI_LD_Color_GreyDark.uasset
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8a71594f4cb3d3c64986f250dfba16e56e6852f7b18254b4bcabbaf88061b64
+size 7698
diff --git a/Content/Materials/MaterialInstances/MI_LD_Lava_Base.uasset b/Content/Materials/MaterialInstances/MI_LD_Lava_Base.uasset
new file mode 100644
index 0000000..9f587e3
--- /dev/null
+++ b/Content/Materials/MaterialInstances/MI_LD_Lava_Base.uasset
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f33441f91715fffd8567abe4290660ca025617eb1e4f0791d7b92dd1d369615c
+size 13907
diff --git a/Content/Meshes/Sphere.uasset b/Content/Meshes/Sphere.uasset
new file mode 100644
index 0000000..0146e00
--- /dev/null
+++ b/Content/Meshes/Sphere.uasset
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f2cee2d8bcdc0c4264c564eebd04c71cff9a710adc40bf73bbc7bef5f96df03
+size 62961
diff --git a/GrapplingGravity.uproject b/GrapplingGravity.uproject
index de7f60f..8412b22 100644
--- a/GrapplingGravity.uproject
+++ b/GrapplingGravity.uproject
@@ -25,6 +25,13 @@
 		{
 			"Name": "GeometryScripting",
 			"Enabled": true
+		},
+		{
+			"Name": "TextureGraph",
+			"Enabled": true,
+			"SupportedTargetPlatforms": [
+				"Win64"
+			]
 		}
 	]
 }
\ No newline at end of file