Planet Game Development, Part 1


The Idea

I want to make a zen game along the lines of Mountain: the protagonist is a planet, and you can watch the seasons change, water flow, and ideally some living things. Along the way I'd also like to build up skills in PCG, shader writing and the like (handy for showing off).

At first I read some articles on Minecraft-style generation, but after some thought it wasn't quite what I was after.

Then I came across this video: https://www.youtube.com/watch?v=lctXaT9pxA0 — it's excellent, and roughly what I have in mind. But that channel's series aims at building an entire star system, something like No Man's Sky: you can land on every planet, build and dig on the surface, and the whole system runs on physically computed orbits. My goal is narrower: focus on a single planet and push the detail as far as possible.

Modeling the Sphere

Four ways to build a sphere mesh:
https://medium.com/@oscarsc/four-ways-to-create-a-mesh-for-a-sphere-d7956b825db4

The video's author models the sphere from two four-sided pyramids joined base to base (an octahedron), which gives a more uniform triangle distribution.

I went with the cube sphere approach instead, which makes LOD easier later, though it comes with quite a few seam- and shading-related problems.

I knew nothing at the start, so I learned how to build a mesh procedurally from this tutorial (the translation is so-so; consider reading the original it links to):
https://zhuanlan.zhihu.com/p/96084686

```csharp
using UnityEngine;

[ExecuteInEditMode]
[RequireComponent(typeof(MeshFilter), typeof(MeshRenderer))]
public class CubeSphereScript : MonoBehaviour
{
    public int gridSize;
    public float radius = 1.0f;
    public ComputeShader shader;
    public float testvalue;

    private Mesh mesh;
    private Vector3[] vertices;
    private Vector3[] normals;

    private void Update()
    {
#if UNITY_EDITOR
        // Regenerate while editing; the UnityEditor API is editor-only,
        // so guard it to keep player builds compiling
        if (!UnityEditor.EditorApplication.isPlaying)
        {
            Initialize();
            Generate();
        }
#endif
    }

    private void Initialize()
    {
        // use random seed to initialize world
    }

    private void Test()
    {
        // Quick experiment: displace each vertex along its normal with a sine wave
        for (int i = 0; i < vertices.Length; i++)
        {
            vertices[i] += 0.5f * Mathf.Sin(vertices[i].y * testvalue) * normals[i];
        }
    }

    private void Generate()
    {
        GetComponent<MeshFilter>().mesh = mesh = new Mesh();
        mesh.name = "Procedural Cube";
        CreateVertices();
        Test();
        mesh.vertices = vertices;
        mesh.normals = normals;
        CreateTriangles();
    }

    private void CreateVertices()
    {
        // Corner, edge and face vertices are shared between cube faces, hence this count
        int cornerVertices = 8;
        int edgeVertices = (gridSize * 3 - 3) * 4;
        int faceVertices = (gridSize - 1) * (gridSize - 1) * 6;
        vertices = new Vector3[cornerVertices + edgeVertices + faceVertices];
        normals = new Vector3[vertices.Length];

        int v = 0;
        // Walk the four side faces as rings, bottom to top
        for (int y = 0; y <= gridSize; y++)
        {
            for (int x = 0; x <= gridSize; x++)
            {
                SetVertex(v++, x, y, 0);
            }
            for (int z = 1; z <= gridSize; z++)
            {
                SetVertex(v++, gridSize, y, z);
            }
            for (int x = gridSize - 1; x >= 0; x--)
            {
                SetVertex(v++, x, y, gridSize);
            }
            for (int z = gridSize - 1; z > 0; z--)
            {
                SetVertex(v++, 0, y, z);
            }
        }
        // Then fill in the interiors of the top and bottom faces
        for (int z = 1; z < gridSize; z++)
        {
            for (int x = 1; x < gridSize; x++)
            {
                SetVertex(v++, x, gridSize, z);
            }
        }
        for (int z = 1; z < gridSize; z++)
        {
            for (int x = 1; x < gridSize; x++)
            {
                SetVertex(v++, x, 0, z);
            }
        }
    }

    private void SetVertex(int i, int x, int y, int z)
    {
        // Map the grid point into the [-1, 1] cube, then onto the unit sphere.
        // This mapping spreads vertices more evenly than plain normalization.
        Vector3 v = new Vector3(x, y, z) * 2f / gridSize - Vector3.one;
        float x2 = v.x * v.x;
        float y2 = v.y * v.y;
        float z2 = v.z * v.z;
        Vector3 s;
        s.x = v.x * Mathf.Sqrt(1f - y2 / 2f - z2 / 2f + y2 * z2 / 3f);
        s.y = v.y * Mathf.Sqrt(1f - x2 / 2f - z2 / 2f + x2 * z2 / 3f);
        s.z = v.z * Mathf.Sqrt(1f - x2 / 2f - y2 / 2f + x2 * y2 / 3f);
        normals[i] = s;

        vertices[i] = normals[i] * radius;
    }

    private void CreateTriangles()
    {
        int quads = gridSize * gridSize * 6;
        int[] triangles = new int[quads * 6];
        int ring = gridSize * 4;
        int t = 0, v = 0;
        for (int y = 0; y < gridSize; y++, v++)
        {
            for (int q = 0; q < ring - 1; q++, v++)
            {
                t = SetQuad(triangles, t, v, v + 1, v + ring, v + ring + 1);
            }
            // Close the ring by wrapping back to its first vertex
            t = SetQuad(triangles, t, v, v - ring + 1, v + ring, v + 1);
        }
        t = CreateTopFace(triangles, t, ring);
        t = CreateBottomFace(triangles, t, ring);

        mesh.triangles = triangles;
    }

    private int CreateTopFace(int[] triangles, int t, int ring)
    {
        int v = ring * gridSize;
        for (int x = 0; x < gridSize - 1; x++, v++)
        {
            t = SetQuad(triangles, t, v, v + 1, v + ring - 1, v + ring);
        }
        t = SetQuad(triangles, t, v, v + 1, v + ring - 1, v + 2);

        int vMin = ring * (gridSize + 1) - 1;
        int vMid = vMin + 1;
        int vMax = v + 2;

        for (int z = 1; z < gridSize - 1; z++, vMin--, vMid++, vMax++)
        {
            t = SetQuad(triangles, t, vMin, vMid, vMin - 1, vMid + gridSize - 1);
            for (int x = 1; x < gridSize - 1; x++, vMid++)
            {
                t = SetQuad(
                    triangles, t,
                    vMid, vMid + 1, vMid + gridSize - 1, vMid + gridSize);
            }
            t = SetQuad(triangles, t, vMid, vMax, vMid + gridSize - 1, vMax + 1);
        }

        int vTop = vMin - 2;
        t = SetQuad(triangles, t, vMin, vMid, vTop + 1, vTop);
        for (int x = 1; x < gridSize - 1; x++, vTop--, vMid++)
        {
            t = SetQuad(triangles, t, vMid, vMid + 1, vTop, vTop - 1);
        }
        t = SetQuad(triangles, t, vMid, vTop - 2, vTop, vTop - 1);
        return t;
    }

    private int CreateBottomFace(int[] triangles, int t, int ring)
    {
        int v = 1;
        int vMid = vertices.Length - (gridSize - 1) * (gridSize - 1);
        t = SetQuad(triangles, t, ring - 1, vMid, 0, 1);
        for (int x = 1; x < gridSize - 1; x++, v++, vMid++)
        {
            t = SetQuad(triangles, t, vMid, vMid + 1, v, v + 1);
        }
        t = SetQuad(triangles, t, vMid, v + 2, v, v + 1);

        int vMin = ring - 2;
        vMid -= gridSize - 2;
        int vMax = v + 2;

        for (int z = 1; z < gridSize - 1; z++, vMin--, vMid++, vMax++)
        {
            t = SetQuad(triangles, t, vMin, vMid + gridSize - 1, vMin + 1, vMid);
            for (int x = 1; x < gridSize - 1; x++, vMid++)
            {
                t = SetQuad(
                    triangles, t,
                    vMid + gridSize - 1, vMid + gridSize, vMid, vMid + 1);
            }
            t = SetQuad(triangles, t, vMid + gridSize - 1, vMax + 1, vMid, vMax);
        }

        int vTop = vMin - 1;
        t = SetQuad(triangles, t, vTop + 1, vTop, vTop + 2, vMid);
        for (int x = 1; x < gridSize - 1; x++, vTop--, vMid++)
        {
            t = SetQuad(triangles, t, vTop, vTop - 1, vMid, vMid + 1);
        }
        t = SetQuad(triangles, t, vTop, vTop - 1, vMid, vTop - 2);

        return t;
    }

    private static int SetQuad(int[] triangles, int i, int v00, int v10, int v01, int v11)
    {
        // Two triangles per quad; the middle two indices are shared
        triangles[i] = v00;
        triangles[i + 1] = triangles[i + 4] = v01;
        triangles[i + 2] = triangles[i + 3] = v10;
        triangles[i + 5] = v11;
        return i + 6;
    }
}
```

The Test() function is my quick experiment with modifying the sphere surface. The video does this with a compute shader; I'm still working out how to use compute shaders, so for now I did it in C#.

It works nicely in edit mode!

But when I increased gridSize — the number of quads along each edge of a cube face — everything was fine up to 105, while anything higher made the mesh go haywire: part of it missing, the rest scrambled. At first I suspected some variable had hit the upper bound of int, and spent ages checking... The actual cause is that a Unity Mesh defaults to a 16-bit index buffer, which caps it at 65,535 vertices — and 105 is exactly where this sphere crosses that limit (gridSize 104 gives 64,898 vertices; 105 gives 66,152).

The proper fix is to split the sphere into several meshes, or just go straight to an LOD scheme. It's not a big deal, so I've flagged it to deal with later; a one-line stopgap is shown below.
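A minimal stopgap (my own note, not what the video does) is to opt the mesh into 32-bit indices, which lifts the 65,535-vertex cap at the cost of a slightly larger index buffer:

```csharp
// Sketch: add this in Generate(), right after creating the Mesh.
// The default IndexFormat.UInt16 caps a mesh at 65,535 vertices.
mesh.indexFormat = UnityEngine.Rendering.IndexFormat.UInt32;
```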

Generating Complex Terrain with a Compute Shader

The terrain generation in the video is done with a compute shader for performance. I'd heard of compute shaders and GPGPU before, so this was a good excuse to finally learn them.

A compute shader looks roughly like this:

(Apparently markdown has no code-fence highlighting mode for shader code = =)

```hlsl
#define Use_Simplex
#include "./Compute/FractalNoise.cginc"

#pragma kernel CSMain

StructuredBuffer<float3> vertices;
RWStructuredBuffer<float> heights;
uint numVertices;
float frequency;
float amplitude;

[numthreads(512, 1, 1)]
void CSMain (uint id : SV_DispatchThreadID)
{
    if (id >= numVertices)
    {
        return;
    }
    float3 vertexPos = vertices[id];
    heights[id] = 1 + fractal_noise(vertexPos, frequency, amplitude); // fractal_noise is explained later in this post
}
```

Then call it from C#. (With [numthreads(512, 1, 1)] above, the helper dispatches ceil(numVertices / 512) thread groups, and the id >= numVertices guard in the kernel discards the overshoot threads.)

```csharp
public ComputeShader shader;
public float frequency = 1f;
public float amplitude = 1f;

private Vector3[] vertices;
private ComputeBuffer vertexBuffer;
private ComputeBuffer heightBuffer;

private void ShaderRun()
{
    // Upload the sphere vertices; the kernel runs one thread per vertex
    ComputeHelper.CreateStructuredBuffer<Vector3>(ref vertexBuffer, vertices);
    shader.SetInt("numVertices", vertices.Length);
    shader.SetFloat("frequency", frequency);
    shader.SetFloat("amplitude", amplitude);
    shader.SetBuffer(0, "vertices", vertexBuffer);
    ComputeHelper.CreateAndSetBuffer<float>(ref heightBuffer, vertexBuffer.count, shader, "heights");

    ComputeHelper.Run(shader, vertexBuffer.count);

    // Read the computed heights back to the CPU (a synchronous GPU readback)
    var heights = new float[vertexBuffer.count];
    heightBuffer.GetData(heights);

    for (int i = 0; i < vertices.Length; i++)
    {
        vertices[i] *= heights[i];
    }

    heightBuffer.Release();
    vertexBuffer.Release();
}
```

ComputeHelper here is a utility class written by the video's author, which I carried over wholesale:

```csharp
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using UnityEngine;

public static class ComputeHelper
{
    // Subscribe to this event to be notified when buffers created in edit mode should be released
    // (i.e. before script compilation occurs, and when exiting edit mode)
    public static event System.Action shouldReleaseEditModeBuffers;

    // Convenience method for dispatching a compute shader.
    // It calculates the number of thread groups based on the number of iterations needed.
    public static void Run(ComputeShader cs, int numIterationsX, int numIterationsY = 1, int numIterationsZ = 1, int kernelIndex = 0)
    {
        Vector3Int threadGroupSizes = GetThreadGroupSizes(cs, kernelIndex);
        int numGroupsX = Mathf.CeilToInt(numIterationsX / (float)threadGroupSizes.x);
        int numGroupsY = Mathf.CeilToInt(numIterationsY / (float)threadGroupSizes.y);
        int numGroupsZ = Mathf.CeilToInt(numIterationsZ / (float)threadGroupSizes.z);
        cs.Dispatch(kernelIndex, numGroupsX, numGroupsY, numGroupsZ);
    }

    // Only run compute shaders if this is true
    // This is only relevant for compute shaders that run outside of playmode
    public static bool CanRunEditModeCompute
    {
        get
        {
            return CheckIfCanRunInEditMode();
        }
    }

    // Set all values from settings object on the shader. Note, variable names must be an exact match in the shader.
    // Settings object can be any class/struct containing vectors/ints/floats/bools
    public static void SetParams(System.Object settings, ComputeShader shader, string variableNamePrefix = "", string variableNameSuffix = "")
    {
        var fields = settings.GetType().GetFields();
        foreach (var field in fields)
        {
            var fieldType = field.FieldType;
            string shaderVariableName = variableNamePrefix + field.Name + variableNameSuffix;

            if (fieldType == typeof(UnityEngine.Vector4) || fieldType == typeof(Vector3) || fieldType == typeof(Vector2))
            {
                shader.SetVector(shaderVariableName, (Vector4)field.GetValue(settings));
            }
            else if (fieldType == typeof(int))
            {
                shader.SetInt(shaderVariableName, (int)field.GetValue(settings));
            }
            else if (fieldType == typeof(float))
            {
                shader.SetFloat(shaderVariableName, (float)field.GetValue(settings));
            }
            else if (fieldType == typeof(bool))
            {
                shader.SetBool(shaderVariableName, (bool)field.GetValue(settings));
            }
            else
            {
                Debug.Log($"Type {fieldType} not implemented");
            }
        }
    }

    public static void CreateStructuredBuffer<T>(ref ComputeBuffer buffer, int count)
    {
        int stride = System.Runtime.InteropServices.Marshal.SizeOf(typeof(T));
        bool createNewBuffer = buffer == null || !buffer.IsValid() || buffer.count != count || buffer.stride != stride;
        if (createNewBuffer)
        {
            Release(buffer);
            buffer = new ComputeBuffer(count, stride);
        }
    }

    public static void CreateStructuredBuffer<T>(ref ComputeBuffer buffer, T[] data)
    {
        CreateStructuredBuffer<T>(ref buffer, data.Length);
        buffer.SetData(data);
    }

    public static ComputeBuffer CreateAndSetBuffer<T>(T[] data, ComputeShader cs, string nameID, int kernelIndex = 0)
    {
        ComputeBuffer buffer = null;
        CreateAndSetBuffer<T>(ref buffer, data, cs, nameID, kernelIndex);
        return buffer;
    }

    public static void CreateAndSetBuffer<T>(ref ComputeBuffer buffer, T[] data, ComputeShader cs, string nameID, int kernelIndex = 0)
    {
        int stride = System.Runtime.InteropServices.Marshal.SizeOf(typeof(T));
        CreateStructuredBuffer<T>(ref buffer, data.Length);
        buffer.SetData(data);
        cs.SetBuffer(kernelIndex, nameID, buffer);
    }

    public static ComputeBuffer CreateAndSetBuffer<T>(int length, ComputeShader cs, string nameID, int kernelIndex = 0)
    {
        ComputeBuffer buffer = null;
        CreateAndSetBuffer<T>(ref buffer, length, cs, nameID, kernelIndex);
        return buffer;
    }

    public static void CreateAndSetBuffer<T>(ref ComputeBuffer buffer, int length, ComputeShader cs, string nameID, int kernelIndex = 0)
    {
        CreateStructuredBuffer<T>(ref buffer, length);
        cs.SetBuffer(kernelIndex, nameID, buffer);
    }

    // Releases supplied buffer/s if not null
    public static void Release(params ComputeBuffer[] buffers)
    {
        for (int i = 0; i < buffers.Length; i++)
        {
            if (buffers[i] != null)
            {
                buffers[i].Release();
            }
        }
    }

    public static Vector3Int GetThreadGroupSizes(ComputeShader compute, int kernelIndex = 0)
    {
        uint x, y, z;
        compute.GetKernelThreadGroupSizes(kernelIndex, out x, out y, out z);
        return new Vector3Int((int)x, (int)y, (int)z);
    }

    // https://cmwdexint.com/2017/12/04/computeshader-setfloats/
    public static float[] PackFloats(params float[] values)
    {
        // SetFloats expects each value padded out to 16 bytes, hence the stride of 4
        float[] packed = new float[values.Length * 4];
        for (int i = 0; i < values.Length; i++)
        {
            packed[i * 4] = values[i];
        }
        return packed;
    }

    // Editor helpers:

#if UNITY_EDITOR
    static UnityEditor.PlayModeStateChange playModeState;

    static ComputeHelper()
    {
        // Monitor play mode state
        UnityEditor.EditorApplication.playModeStateChanged -= MonitorPlayModeState;
        UnityEditor.EditorApplication.playModeStateChanged += MonitorPlayModeState;
        // Monitor script compilation
        UnityEditor.Compilation.CompilationPipeline.compilationStarted -= OnCompilationStarted;
        UnityEditor.Compilation.CompilationPipeline.compilationStarted += OnCompilationStarted;
    }

    static void MonitorPlayModeState(UnityEditor.PlayModeStateChange state)
    {
        playModeState = state;
        if (state == UnityEditor.PlayModeStateChange.ExitingEditMode)
        {
            if (shouldReleaseEditModeBuffers != null)
            {
                shouldReleaseEditModeBuffers();
            }
        }
    }

    static void OnCompilationStarted(System.Object obj)
    {
        if (shouldReleaseEditModeBuffers != null)
        {
            shouldReleaseEditModeBuffers();
        }
    }
#endif

    static bool CheckIfCanRunInEditMode()
    {
        bool isCompilingOrExitingEditMode = false;
#if UNITY_EDITOR
        isCompilingOrExitingEditMode |= UnityEditor.EditorApplication.isCompiling;
        isCompilingOrExitingEditMode |= playModeState == UnityEditor.PlayModeStateChange.ExitingEditMode;
#endif
        bool canRun = !isCompilingOrExitingEditMode;
        return canRun;
    }
}
```

In the video the author builds a moon-crater effect: geometric functions rough out the shape, and smooth blending curves handle the transitions.

Smoothing the hard seams that geometric functions produce (the core trick is sketched below):
https://iquilezles.org/articles/smin/
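The heart of that article is the polynomial smooth minimum. Here's my C# transcription of the formula (my own sketch, not code from the video):

```csharp
using UnityEngine;

public static class SmoothMath
{
    // Polynomial smooth minimum (Inigo Quilez): behaves like Mathf.Min(a, b),
    // but blends the two shapes over a band of width k (k > 0) instead of
    // leaving a hard crease where they meet.
    public static float Smin(float a, float b, float k)
    {
        float h = Mathf.Clamp01(0.5f + 0.5f * (b - a) / k);
        return Mathf.Lerp(b, a, h) - k * h * (1f - h);
    }
}
```

Taking Smin(terrainHeight, craterShape, k) instead of a plain min() is what turns a hard crater rim into a rounded one.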

Generating Terrain with Noise

First, some background reading on noise:
Noise in game development: https://www.cnblogs.com/KillerAery/p/10765897.html
YivanLee's summary of noise: https://zhuanlan.zhihu.com/p/47959352

Noise is inseparable from hashing. The hash functions in both articles exist to provide reproducible randomness. Games like Minecraft set a seed before creating a world; that seed persists and drives all the terrain generation, so for any fixed location, the same seed always generates the same terrain. This also keeps save files small: a region can simply be recomputed on the fly whenever it needs to be loaded. A toy sketch of the idea follows.
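This is my own illustration, not code from either article: a small integer hash maps (seed, x, y) to a repeatable pseudo-random value, so the same inputs always produce the same output with no state stored anywhere.

```csharp
public static class WorldHash
{
    // Integer mixing hash: deterministic, so the same (seed, x, y) always
    // returns the same value -- this is the "reproducible randomness".
    public static float Value01(uint seed, int x, int y)
    {
        uint h = seed;
        h ^= (uint)x * 0x9E3779B9u;  // fold the coordinates in with odd constants
        h ^= (uint)y * 0x85EBCA6Bu;
        h ^= h >> 16; h *= 0x7FEB352Du;
        h ^= h >> 15; h *= 0x846CA68Bu;
        h ^= h >> 16;
        return h / 4294967295f;      // map to [0, 1]
    }
}
```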

What a noise function adds on top is natural-looking randomness: the output should look random rather than repetitive, yet feel organic, not purely structureless like white noise. The sketch below makes the distinction concrete.
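Again just my illustration: 1D value noise built on the hash above. Hashing alone gives white noise; smoothly interpolating between hashed lattice values gives the organic look noise functions are after.

```csharp
using UnityEngine;

public static class ValueNoise1D
{
    // Smoothly interpolate between hashed values anchored at integer positions
    public static float Sample(uint seed, float x)
    {
        int x0 = Mathf.FloorToInt(x);
        float t = x - x0;
        t = t * t * (3f - 2f * t); // smoothstep fade hides the lattice
        float a = WorldHash.Value01(seed, x0, 0);
        float b = WorldHash.Value01(seed, x0 + 1, 0);
        return Mathf.Lerp(a, b, t);
    }
}
```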

The following is quoted from http://www.cnblogs.com/KillerAery/ :

In typical terrain generation the height field is produced by 2D noise (feed in a 2D coordinate, get back a height). But a single layer of noise is monotonous — one frequency, one amplitude — and can't capture the complexity of natural terrain: long mountain chains and towering peaks, hills and erosion pits, rock outcrops, right down to pebbles.

To reproduce that natural character we can borrow the idea of fractal noise: evaluate the noise several times with different parameters and sum the results.

The fractal noise code:

```hlsl
#include "./Noise.cginc"

float fractal_noise_once(float3 pos, float frequency, float amplitude) {
    return abs(noise(pos * frequency)) * amplitude;
}

float fractal_noise(float3 pos, float frequency = 1, float amplitude = 1) {
    // Four octaves: each one doubles the frequency and halves the amplitude
    return
        fractal_noise_once(pos, 0.125 * frequency, 1.0   * amplitude)
      + fractal_noise_once(pos, 0.25  * frequency, 0.5   * amplitude)
      + fractal_noise_once(pos, 0.5   * frequency, 0.25  * amplitude)
      + fractal_noise_once(pos, 1.0   * frequency, 0.125 * amplitude);
}
```

The result is pretty decent:

Surface Shading

With the terrain freshly built, I wanted to throw a few materials at it to see how it looked — but on my procedurally generated mesh, neither the albedo texture nor the normal map showed up at all. A little searching explained why: when building the mesh I only set vertices, normals and triangles, never uv, and UVs are precisely what describe how a texture wraps onto the surface.
So to get textures to display properly, I either have to set up UVs, or texture in a way that doesn't depend on UVs.
The first option suits a pipeline where an artist authors textures to fit the mesh, so they land exactly as designed with no extra processing. (A cube sphere's UVs are simple in principle — one texture per cube face — but that brings back the seam problems mentioned earlier.)
The second option allows extra processing while texturing: blending several materials at once, grading the material by vertex position, and so on.

This article covers the technique in detail:
https://bgolus.medium.com/normal-mapping-for-a-triplanar-shader-10bf39dca05a

Here is the video author's texturing code, Triplanar.cginc:


```hlsl
float4 triplanar(float3 vertPos, float3 normal, float scale, sampler2D tex) {
    // Calculate triplanar coordinates
    float2 uvX = vertPos.zy * scale;
    float2 uvY = vertPos.xz * scale;
    float2 uvZ = vertPos.xy * scale;

    float4 colX = tex2D(tex, uvX);
    float4 colY = tex2D(tex, uvY);
    float4 colZ = tex2D(tex, uvZ);
    // Square normal to make all values positive + increase blend sharpness
    float3 blendWeight = normal * normal;
    // Divide blend weight by the sum of its components. This will make x + y + z = 1
    blendWeight /= dot(blendWeight, 1);
    return colX * blendWeight.x + colY * blendWeight.y + colZ * blendWeight.z;
}

float4 triplanarOffset(float3 vertPos, float3 normal, float3 scale, sampler2D tex, float2 offset) {
    float3 scaledPos = vertPos / scale;
    float4 colX = tex2D(tex, scaledPos.zy + offset);
    float4 colY = tex2D(tex, scaledPos.xz + offset);
    float4 colZ = tex2D(tex, scaledPos.xy + offset);

    // Square normal to make all values positive + increase blend sharpness
    float3 blendWeight = normal * normal;
    // Divide blend weight by the sum of its components. This will make x + y + z = 1
    blendWeight /= dot(blendWeight, 1);
    return colX * blendWeight.x + colY * blendWeight.y + colZ * blendWeight.z;
}

float3 ObjectToTangentVector(float4 tangent, float3 normal, float3 objectSpaceVector) {
    float3 normalizedTangent = normalize(tangent.xyz);
    float3 binormal = cross(normal, normalizedTangent) * tangent.w;
    float3x3 rot = float3x3(normalizedTangent, binormal, normal);
    return mul(rot, objectSpaceVector);
}

// Reoriented Normal Mapping
// http://blog.selfshadow.com/publications/blending-in-detail/
// Altered to take normals (-1 to 1 ranges) rather than unsigned normal maps (0 to 1 ranges)
float3 blend_rnm(float3 n1, float3 n2)
{
    n1.z += 1;
    n2.xy = -n2.xy;

    return n1 * dot(n1, n2) / n1.z - n2;
}

// Sample normal map with triplanar coordinates
// Returned normal will be in obj/world space (depending whether pos/normal are given in obj or world space)
// Based on: medium.com/@bgolus/normal-mapping-for-a-triplanar-shader-10bf39dca05a
float3 triplanarNormal(float3 vertPos, float3 normal, float3 scale, float2 offset, sampler2D normalMap) {
    float3 absNormal = abs(normal);

    // Calculate triplanar blend
    float3 blendWeight = saturate(pow(normal, 4));
    // Divide blend weight by the sum of its components. This will make x + y + z = 1
    blendWeight /= dot(blendWeight, 1);

    // Calculate triplanar coordinates
    float2 uvX = vertPos.zy * scale + offset;
    float2 uvY = vertPos.xz * scale + offset;
    float2 uvZ = vertPos.xy * scale + offset;

    // Sample tangent space normal maps
    // UnpackNormal puts values in range [-1, 1] (and accounts for DXT5nm compression)
    float3 tangentNormalX = UnpackNormal(tex2D(normalMap, uvX));
    float3 tangentNormalY = UnpackNormal(tex2D(normalMap, uvY));
    float3 tangentNormalZ = UnpackNormal(tex2D(normalMap, uvZ));

    // Swizzle normals to match tangent space and apply reoriented normal mapping blend
    tangentNormalX = blend_rnm(half3(normal.zy, absNormal.x), tangentNormalX);
    tangentNormalY = blend_rnm(half3(normal.xz, absNormal.y), tangentNormalY);
    tangentNormalZ = blend_rnm(half3(normal.xy, absNormal.z), tangentNormalZ);

    // Apply input normal sign to tangent space Z
    float3 axisSign = sign(normal);
    tangentNormalX.z *= axisSign.x;
    tangentNormalY.z *= axisSign.y;
    tangentNormalZ.z *= axisSign.z;

    // Swizzle tangent normals to match input normal and blend together
    float3 outputNormal = normalize(
        tangentNormalX.zyx * blendWeight.x +
        tangentNormalY.xzy * blendWeight.y +
        tangentNormalZ.xyz * blendWeight.z
    );

    return outputNormal;
}

float3 triplanarNormalTangentSpace(float3 vertPos, float3 normal, float3 scale, float4 tangent, sampler2D normalMap) {
    float3 textureNormal = triplanarNormal(vertPos, normal, scale, 0, normalMap);
    return ObjectToTangentVector(tangent, normal, textureNormal);
}

float3 triplanarNormalTangentSpace(float3 vertPos, float3 normal, float3 scale, float2 offset, float4 tangent, sampler2D normalMap) {
    float3 textureNormal = triplanarNormal(vertPos, normal, scale, offset, normalMap);
    return ObjectToTangentVector(tangent, normal, textureNormal);
}
```

Then the surface shader needs updating to use it:

Shader "Custom/PlanetSurfaceShader" {
Properties {
_Color ("Color Tint", Color) = (1, 1, 1, 1)
_SecondColor ("Second Color", Color) = (1, 1, 1, 1)
_SecondColorRange ("Second Color Range", Float) = 1
_MainTex ("Main Tex", 2D) = "white" {}
_MainTexScale ("Main Tex Scale", Float) = 1
_BumpMap ("Bump Map", 2D) = "bump" {}
_BumpMapScale ("Bump Map Scale", Float) = 1
_BumpMapStrength ("Bump Map Strength", Float) = 0.1
_Specular ("Specular", Color) = (1, 1, 1, 1)
_Gloss ("Gloss", Range(8.0, 256)) = 20
}
SubShader {
Pass {
Tags { "LightMode"="ForwardBase" }

CGPROGRAM

#pragma vertex vert
#pragma fragment frag


#include "Lighting.cginc"
#include "./Compute/Triplanar.cginc"
#define Use_Simplex
#include "./Compute/Noise.cginc"

fixed4 _Color;
fixed4 _SecondColor;
float _SecondColorRange;
sampler2D _MainTex;
float4 _MainTex_ST;
float _MainTexScale;
sampler2D _BumpMap;
float4 _BumpMap_ST;
float _BumpMapScale;
float _BumpMapStrength;
fixed4 _Specular;
float _Gloss;

struct a2v {
float4 vertex : POSITION;
float3 normal : NORMAL;
};

struct v2f {
float4 pos : SV_POSITION;
float4 vertex : TEXCOORD0;
float3 normal : NORMAL;
float3 worldPos : TEXCOORD2;
};

v2f vert(a2v v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);

o.vertex = v.vertex;
o.normal = v.normal;

float3 worldPos = mul(unity_ObjectToWorld, v.vertex).xyz;
o.worldPos = worldPos;

return o;
}

fixed4 frag(v2f i) : SV_Target {
// Compute the light and view dir in world space
fixed3 lightDir = normalize(UnityWorldSpaceLightDir(i.worldPos));
fixed3 viewDir = normalize(UnityWorldSpaceViewDir(i.worldPos));

fixed3 bump = triplanarNormal(i.vertex, i.normal, _BumpMapScale, _BumpMap_ST, _BumpMap);

bump = lerp(i.normal, bump, _BumpMapStrength);

float4 texColor = triplanar(i.vertex, i.normal, _MainTexScale, _MainTex);
float4 albedoMainColor = texColor * _Color;
float4 albedoSecondColor = texColor * _SecondColor;

//float secondColorWeight = noise(i.vertex.xyz);
float secondColorWeight = noise_with_weight(i.vertex.xyz, _SecondColorRange);

float4 albedo = lerp(albedoMainColor, albedoSecondColor, secondColorWeight);

fixed3 ambient = UNITY_LIGHTMODEL_AMBIENT.xyz * albedo;

fixed3 diffuse = _LightColor0.rgb * albedo * max(0, dot(bump, lightDir));

fixed3 halfDir = normalize(lightDir + viewDir);

fixed3 specular = _LightColor0.rgb * _Specular.rgb * pow(max(0, dot(bump, halfDir)), _Gloss);

return fixed4(ambient + diffuse + specular, 1.0);
}

ENDCG
}
}
FallBack "Specular"
}

Besides changing how the texture is applied, I also added a secondary color to the shader to suggest different surface composition on the planet, and used noise_with_weight() to shift the balance between the two colors while still following the noise:

```hlsl
float noise_with_weight(float3 p, float weight) {
    return pow(abs(noise(p)), weight);
}
```

The effect is nice:

For the starfield in the background I used the video author's code as-is:

```csharp
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class StarDome : MonoBehaviour
{
    public MeshRenderer starPrefab;
    public Vector2 radiusMinMax;
    public int count = 1000;
    const float calibrationDst = 2000;
    public Vector2 brightnessMinMax;

    Camera cam;

    void Start()
    {
        cam = Camera.main;
        //var sw = System.Diagnostics.Stopwatch.StartNew ();
        if (cam)
        {
            // Place stars just inside the far clip plane, scaled to compensate for distance
            float starDst = cam.farClipPlane - radiusMinMax.y;
            float scale = starDst / calibrationDst;

            for (int i = 0; i < count; i++)
            {
                MeshRenderer star = Instantiate(starPrefab, Random.onUnitSphere * starDst, Quaternion.identity, transform);
                float t = SmallestRandomValue(6);
                star.transform.localScale = Vector3.one * Mathf.Lerp(radiusMinMax.x, radiusMinMax.y, t) * scale;
                star.material.color = Color.Lerp(Color.black, star.material.color, Mathf.Lerp(brightnessMinMax.x, brightnessMinMax.y, t));
            }
        }
        //Debug.Log (sw.ElapsedMilliseconds);
    }

    float SmallestRandomValue(int iterations)
    {
        // Taking the minimum of several uniform samples biases t toward 0,
        // so most stars come out small and dim, with only a few large bright ones
        float r = 1;
        for (int i = 0; i < iterations; i++)
        {
            r = Mathf.Min(r, Random.value);
        }
        return r;
    }

    void LateUpdate()
    {
        // Keep the dome centred on the camera so the stars never show parallax
        if (cam != null)
        {
            transform.position = cam.transform.position;
        }
    }
}
```

Water

This is honestly where I spent the most time. At first I assumed I'd just draw another sphere and apply some watery material to it, but the result was awful — it looked like a layer of wavy cling film.
Water color has to depend on viewed depth: the deeper you see into the water, the more of the water's blue you should get and the less of the ground color behind it. In other words, it's exponential absorption: for view depth d and absorption coefficient k, the surviving background light is exp(-d·k), so the water contributes 1 - exp(-d·k) — exactly the curve the shader below applies through _DepthMultiplier and _AlphaMultiplier.

So it's done as a post-process. By the time the water is rendered we already have the depth texture — the depth of the planet terrain as seen from the camera — and intersecting each view ray with the ocean sphere gives the water depth along that ray.

The video doesn't really explain the implementation; I had to read through the author's project carefully before the whole flow made sense.

First, add a post-processing script to the camera. The attribute [ImageEffectAllowedInSceneView], together with
[ImageEffectOpaque] private void OnRenderImage(RenderTexture src, RenderTexture dest), is the key hook into Unity's post-processing pipeline:

```csharp
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;

[ExecuteInEditMode, ImageEffectAllowedInSceneView]
public class CustomPostProcess : MonoBehaviour
{
    List<RenderTexture> renderSources = new List<RenderTexture>();
    List<RenderTexture> renderTargets = new List<RenderTexture>();
    List<Material> materials = new List<Material>();
    public Material defaultMat;

    [ImageEffectOpaque]
    private void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        materials.Clear();
        materials.Add(defaultMat);
        renderSources.Clear();
        renderTargets.Clear();

        renderSources.Add(src);

        // Chain the effects: each material renders into a temporary texture
        // that becomes the source for the next one
        for (int i = 0; i < materials.Count - 1; i++)
        {
            var temp = RenderTexture.GetTemporary(src.width, src.height, 0, dest.graphicsFormat);
            renderTargets.Add(temp);
            renderSources.Add(temp);
        }

        renderTargets.Add(dest);

        for (int i = 0; i < materials.Count; i++)
        {
            // Blit does the following:
            // - sets _MainTex property on material to the source texture
            // - sets the render target to the destination texture
            // - draws a full-screen quad
            // This copies the src texture to the dest texture, with whatever modifications the shader makes
            Graphics.Blit(renderSources[i], renderTargets[i], materials[i]);
        }

        // Release all temporary render textures so they can be reused
        for (int i = 0; i < renderTargets.Count - 1; i++)
        {
            RenderTexture.ReleaseTemporary(renderTargets[i]);
        }
    }
}
```

In addition, a script on the camera needs to call GetComponent<Camera>().depthTextureMode = DepthTextureMode.Depth;
to tell the camera to keep its depth texture; a shader can then read it directly after declaring sampler2D _CameraDepthTexture. A minimal version of such a script is shown below.
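This is my own sketch (the class name is arbitrary):

```csharp
using UnityEngine;

[ExecuteInEditMode]
[RequireComponent(typeof(Camera))]
public class EnableDepthTexture : MonoBehaviour
{
    void OnEnable()
    {
        // Ask the camera to render a depth pass that shaders can then read
        // through the built-in _CameraDepthTexture.
        GetComponent<Camera>().depthTextureMode = DepthTextureMode.Depth;
    }
}
```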

The material's shader is as follows. Everything before the if computes the water depth; inside the if, the color, waves and highlights are derived from that depth. (The raySphere helper it calls comes from Math.cginc and is sketched after the listing.)

Shader "Custom/PlanetWaterShader" {
Properties {
_MainTex ("Main Tex", 2D) = "white" {}
_OceanCentre ("Ocean Centre", Vector) = (0, 0, 0, 0)
_OceanRadius ("Ocean Radius", Float) = 1.0
_PlanetScale ("PlanetScale", Float) = 1.0
_DirToSun ("Dir To Sun", vector) = (1, 0, 0, 0)
_ColA ("Color A", Color) = (1, 1, 1, 1)
_ColB ("Color B", Color) = (1, 1, 1, 1)

_WaveNormalA ("Wave Normal A", 2D) = "white" {}
_WaveNormalB ("Wave Normal B", 2D) = "white" {}
_WaveStrength ("Wave Strength", Float) = 1.0
_WaveNormalScale ("Wave Normal Scale", Float) = 1.0
_WaveSpeed ("Wave Speed", Float) = 10.0

_SpecularCol ("Specular Color", Color) = (1, 1, 1, 1)
_DepthMultiplier ("Depth Multiplier", Float) = 1.0
_AlphaMultiplier ("Alpha Multiplier", Float) = 1.0
_Smoothness ("Smoothness", Float) = 1.0
}
SubShader {

Cull Off ZWrite Off ZTest Always

Pass {

CGPROGRAM

#pragma vertex vert
#pragma fragment frag


#include "Lighting.cginc"
#include "./Compute/Triplanar.cginc"
#define Use_Simplex
#include "./Compute/Noise.cginc"
#include "./Compute/Math.cginc"

sampler2D _CameraDepthTexture;

sampler2D _MainTex;
float4 _OceanCentre;
float _OceanRadius;
float _PlanetScale;
float3 _DirToSun;

sampler2D _WaveNormalA;
sampler2D _WaveNormalB;
float _WaveStrength;
float _WaveNormalScale;
float _WaveSpeed;

float4 params;

float4 _ColA;
float4 _ColB;
float4 _SpecularCol;
float _DepthMultiplier;
float _AlphaMultiplier;
float _Smoothness;

struct a2v {
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};

struct v2f {
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 viewVector : TEXCOORD1;
};

v2f vert(a2v v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;

float3 viewVector = mul(unity_CameraInvProjection, float4(v.uv*2-1, 0, -1));
o.viewVector = mul(unity_CameraToWorld, float4(viewVector,0));
return o;
}

fixed4 frag(v2f i) : SV_Target {
fixed4 originalCol = tex2D(_MainTex, i.uv);

float3 rayPos = _WorldSpaceCameraPos;
float viewLength = length(i.viewVector);
float3 rayDir = i.viewVector / viewLength;

float nonlin_depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.uv);
float sceneDepth = LinearEyeDepth(nonlin_depth) * viewLength;

float2 hitInfo = raySphere(_OceanCentre, _OceanRadius, rayPos, rayDir);
float dstToOcean = hitInfo.x;
float dstThroughOcean = hitInfo.y;
float3 rayOceanIntersectPos = rayPos + rayDir * dstToOcean - _OceanCentre;

float oceanViewDepth = min(dstThroughOcean, sceneDepth - dstToOcean);

if (oceanViewDepth > 0) {
float3 clipPlanePos = rayPos + i.viewVector * _ProjectionParams.y;

float dstAboveWater = length(clipPlanePos - _OceanCentre) - _OceanRadius;

float t = 1 - exp(-oceanViewDepth / _PlanetScale * _DepthMultiplier);
float alpha = 1-exp(-oceanViewDepth / _PlanetScale * _AlphaMultiplier);
float4 oceanCol = lerp(_ColA, _ColB, t);

float3 oceanSphereNormal = normalize(rayOceanIntersectPos);

float2 waveOffsetA = float2(_Time.x * _WaveSpeed, _Time.x * _WaveSpeed * 0.8);
float2 waveOffsetB = float2(_Time.x * _WaveSpeed * - 0.8, _Time.x * _WaveSpeed * -0.3);
float3 waveNormal = triplanarNormal(rayOceanIntersectPos, oceanSphereNormal, _WaveNormalScale / _PlanetScale, waveOffsetA, _WaveNormalA);
waveNormal = triplanarNormal(rayOceanIntersectPos, waveNormal, _WaveNormalScale / _PlanetScale, waveOffsetB, _WaveNormalB);
waveNormal = normalize(lerp(oceanSphereNormal, waveNormal, _WaveStrength));
//return float4(oceanNormal * .5 + .5,1);
float diffuseLighting = saturate(dot(oceanSphereNormal, _DirToSun));
float specularAngle = acos(dot(normalize(_DirToSun - rayDir), waveNormal));
float specularExponent = specularAngle / (1 - _Smoothness);
float specularHighlight = exp(-specularExponent * specularExponent);

oceanCol *= diffuseLighting;
oceanCol += specularHighlight * (dstAboveWater > 0) * _SpecularCol;

//return float4(oceanSphereNormal,1);
float4 finalCol = originalCol * (1-alpha) + oceanCol * alpha;
return float4(finalCol.xyz, params.x);
}

return originalCol;
}

ENDCG
}
}
FallBack "Specular"
}
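One piece not shown above is raySphere from Math.cginc. Judging from how the shader uses it, it returns (distance to the sphere, distance travelled through the sphere), with a miss encoded as zero thickness. Here's the standard analytic ray-sphere intersection written out in C# so the math is easier to follow (my reconstruction under those assumptions, not the project's exact code):

```csharp
using UnityEngine;

public static class RaySphereMath
{
    // Returns (dstToSphere, dstThroughSphere) for a ray against a sphere.
    // On a miss, dstThroughSphere is 0; if the origin is inside the sphere,
    // dstToSphere is 0 -- matching how the water shader consumes hitInfo.
    public static Vector2 Intersect(Vector3 centre, float radius, Vector3 rayOrigin, Vector3 rayDir)
    {
        // Solve |rayOrigin + d * rayDir - centre|^2 = radius^2 for d
        Vector3 offset = rayOrigin - centre;
        float b = 2f * Vector3.Dot(offset, rayDir);           // rayDir assumed normalized, so a = 1
        float c = Vector3.Dot(offset, offset) - radius * radius;
        float discriminant = b * b - 4f * c;

        if (discriminant > 0f)
        {
            float s = Mathf.Sqrt(discriminant);
            float dstNear = Mathf.Max(0f, (-b - s) / 2f);
            float dstFar = (-b + s) / 2f;
            if (dstFar >= 0f)
            {
                return new Vector2(dstNear, dstFar - dstNear);
            }
        }
        return new Vector2(float.MaxValue, 0f); // ray missed, or the sphere is entirely behind it
    }
}
```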

The final result:

Wrapping Up

With all these parts in place it finally has roughly the right shape, though the result is only just passable — at best it looks like a meteorite with some water on it.
Reflecting on why: natural terrain follows rules of its own and can't be fully described by piling up noise functions. Adding deliberately shaped features built from geometric functions — moon craters, flat continental plains and the like — feels contrived in process, but the result actually reads as more natural.

Also, this is still a long way from the planet I imagine: seasons, a water cycle, biomes and more are all missing. This post has grown long enough, so new progress will go into the next one.

A good blog that explains terrain generation from a simple starting point:
https://www.cnblogs.com/KillerAery/p/11509470.html

A summary of random noise in games:
https://zhuanlan.zhihu.com/p/47959352

The video my friend Yubao recommended — exactly the effect I want to achieve:
https://www.youtube.com/watch?v=lctXaT9pxA0

Learning Unity mesh generation:
https://zhuanlan.zhihu.com/p/96084686

NVIDIA on GPU terrain generation:
https://developer.nvidia.com/gpugems/gpugems3/part-i-geometry/chapter-1-generating-complex-procedural-terrains-using-gpu

Customizing Unity's Inspector panel:
https://www.bbsmax.com/A/x9J2b2jMJ6/
https://blog.csdn.net/qq_57896821/article/details/121264618

Unity's built-in attributes:
https://www.jianshu.com/p/41399298f992
