• Day 85: Network Construction (code copying)


    package dl;

    import java.util.Arrays;

    import dl.Dataset.Instance;
    import dl.MathUtils.Operator;

    /**
     * CNN.
     */
    public class FullCnn {

        /**
         * The learning rate. It is adjusted during training.
         */
        private static double ALPHA = 0.85;

        /**
         * The weight decay (regularization) coefficient.
         */
        public static double LAMBDA = 0;

        /**
         * Manage layers.
         */
        private static LayerBuilder layerBuilder;

        /**
         * Train using a number of instances simultaneously.
         */
        private int batchSize;

        /**
         * Divide the given value by the batch size.
         */
        private Operator divideBatchSize;

        /**
         * Multiply the given value by alpha.
         */
        private Operator multiplyAlpha;

        /**
         * Multiply the given value by (1 - lambda * alpha).
         */
        private Operator multiplyLambda;

        /**
         ***********************
         * The first constructor.
         ***********************
         */
        public FullCnn(LayerBuilder paraLayerBuilder, int paraBatchSize) {
            layerBuilder = paraLayerBuilder;
            batchSize = paraBatchSize;
            setup();
            initOperators();
        }// Of the first constructor

        /**
         ***********************
         * Initialize the operators using anonymous classes.
         ***********************
         */
        private void initOperators() {
            divideBatchSize = new Operator() {
                private static final long serialVersionUID = 7424011281732651055L;

                @Override
                public double process(double value) {
                    return value / batchSize;
                }// Of process
            };

            multiplyAlpha = new Operator() {
                private static final long serialVersionUID = 5761368499808006552L;

                @Override
                public double process(double value) {
                    return value * ALPHA;
                }// Of process
            };

            multiplyLambda = new Operator() {
                private static final long serialVersionUID = 4499087728362870577L;

                @Override
                public double process(double value) {
                    return value * (1 - LAMBDA * ALPHA);
                }// Of process
            };
        }// Of initOperators
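
        // Note: taken together, these three operators implement mini-batch
        // gradient descent with weight decay. In updateKernels() below, the
        // kernel update amounts to
        //   kernel <- kernel * (1 - LAMBDA * ALPHA) + ALPHA * deltaKernel / batchSize,
        // where multiplyLambda supplies the decay factor, divideBatchSize the
        // batch average, and multiplyAlpha the learning rate scaling.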

        /**
         ***********************
         * Setup according to the layer builder.
         ***********************
         */
        public void setup() {
            CnnLayer tempInputLayer = layerBuilder.getLayer(0);
            tempInputLayer.initOutMaps(batchSize);

            for (int i = 1; i < layerBuilder.getNumLayers(); i++) {
                CnnLayer tempLayer = layerBuilder.getLayer(i);
                CnnLayer tempFrontLayer = layerBuilder.getLayer(i - 1);
                int tempFrontMapNum = tempFrontLayer.getOutMapNum();
                switch (tempLayer.getType()) {
                case INPUT:
                    // Should not be INPUT. Maybe an error should be thrown out.
                    break;
                case CONVOLUTION:
                    tempLayer.setMapSize(
                            tempFrontLayer.getMapSize().subtract(tempLayer.getKernelSize(), 1));
                    tempLayer.initKernel(tempFrontMapNum);
                    tempLayer.initBias();
                    tempLayer.initErrors(batchSize);
                    tempLayer.initOutMaps(batchSize);
                    break;
                case SAMPLING:
                    tempLayer.setOutMapNum(tempFrontMapNum);
                    tempLayer.setMapSize(tempFrontLayer.getMapSize().divide(tempLayer.getScaleSize()));
                    tempLayer.initErrors(batchSize);
                    tempLayer.initOutMaps(batchSize);
                    break;
                case OUTPUT:
                    tempLayer.initOutputKernel(tempFrontMapNum, tempFrontLayer.getMapSize());
                    tempLayer.initBias();
                    tempLayer.initErrors(batchSize);
                    tempLayer.initOutMaps(batchSize);
                    break;
                }// Of switch
            } // Of for i
        }// Of setup
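
        // Note: the size bookkeeping above follows two rules. A valid
        // convolution with an a*b kernel shrinks an m*n map to
        // (m - a + 1) * (n - b + 1), which is what subtract(kernelSize, 1)
        // computes, and a sampling layer with an s*t scale shrinks it to
        // (m / s) * (n / t). The output layer's kernels cover the whole
        // incoming map, so its output maps are 1*1.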

        /**
         ***********************
         * Forward computing.
         ***********************
         */
        private void forward(Instance instance) {
            setInputLayerOutput(instance);
            for (int l = 1; l < layerBuilder.getNumLayers(); l++) {
                CnnLayer tempCurrentLayer = layerBuilder.getLayer(l);
                CnnLayer tempLastLayer = layerBuilder.getLayer(l - 1);
                switch (tempCurrentLayer.getType()) {
                case CONVOLUTION:
                case OUTPUT:
                    setConvolutionOutput(tempCurrentLayer, tempLastLayer);
                    break;
                case SAMPLING:
                    setSampOutput(tempCurrentLayer, tempLastLayer);
                    break;
                default:
                    break;
                }// Of switch
            } // Of for l
        }// Of forward
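
        // Note: the OUTPUT case reuses setConvolutionOutput() because the
        // output layer is just a convolution layer whose kernels equal the
        // incoming map size, so each of its maps collapses to a single 1*1
        // value, one per class.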

        /**
         ***********************
         * Set the input layer output. Given a record, copy its values to the
         * input map.
         ***********************
         */
        private void setInputLayerOutput(Instance paraRecord) {
            CnnLayer tempInputLayer = layerBuilder.getLayer(0);
            Size tempMapSize = tempInputLayer.getMapSize();
            double[] tempAttributes = paraRecord.getAttributes();
            if (tempAttributes.length != tempMapSize.width * tempMapSize.height)
                throw new RuntimeException("input record does not match the map size.");

            for (int i = 0; i < tempMapSize.width; i++) {
                for (int j = 0; j < tempMapSize.height; j++) {
                    tempInputLayer.setMapValue(0, i, j, tempAttributes[tempMapSize.height * i + j]);
                } // Of for j
            } // Of for i
        }// Of setInputLayerOutput

        /**
         ***********************
         * Compute the convolution output according to the output of the last
         * layer.
         *
         * @param paraLayer
         *            the current layer.
         * @param paraLastLayer
         *            the last layer.
         ***********************
         */
        private void setConvolutionOutput(final CnnLayer paraLayer, final CnnLayer paraLastLayer) {
            final int lastMapNum = paraLastLayer.getOutMapNum();

            // Attention: paraLayer.getOutMapNum() may not be right.
            for (int j = 0; j < paraLayer.getOutMapNum(); j++) {
                double[][] tempSumMatrix = null;
                for (int i = 0; i < lastMapNum; i++) {
                    double[][] lastMap = paraLastLayer.getMap(i);
                    double[][] kernel = paraLayer.getKernel(i, j);
                    if (tempSumMatrix == null) {
                        // On the first map.
                        tempSumMatrix = MathUtils.convnValid(lastMap, kernel);
                    } else {
                        // Sum up convolution maps.
                        tempSumMatrix = MathUtils.matrixOp(MathUtils.convnValid(lastMap, kernel),
                                tempSumMatrix, null, null, MathUtils.plus);
                    } // Of if
                } // Of for i

                // Activation.
                final double bias = paraLayer.getBias(j);
                tempSumMatrix = MathUtils.matrixOp(tempSumMatrix, new Operator() {
                    private static final long serialVersionUID = 2469461972825890810L;

                    @Override
                    public double process(double value) {
                        return MathUtils.sigmod(value + bias);
                    }// Of process
                });

                paraLayer.setMapValue(j, tempSumMatrix);
            } // Of for j
        }// Of setConvolutionOutput
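
        // In formula form, the j-th output map computed above is
        //   out_j = sigmoid(sum_i convnValid(in_i, kernel_ij) + bias_j),
        // a sum of valid convolutions over all input maps followed by the
        // sigmoid activation.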

        /**
         ***********************
         * Compute the sampling output according to the output of the last
         * layer.
         *
         * @param paraLayer
         *            the current layer.
         * @param paraLastLayer
         *            the last layer.
         ***********************
         */
        private void setSampOutput(final CnnLayer paraLayer, final CnnLayer paraLastLayer) {
            // Attention: paraLayer.outMapNum may not be right.
            for (int i = 0; i < paraLayer.outMapNum; i++) {
                double[][] lastMap = paraLastLayer.getMap(i);
                Size scaleSize = paraLayer.getScaleSize();
                double[][] sampMatrix = MathUtils.scaleMatrix(lastMap, scaleSize);
                paraLayer.setMapValue(i, sampMatrix);
            } // Of for i
        }// Of setSampOutput
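
        // Note: scaleMatrix() shrinks each scaleSize window of lastMap to a
        // single value, so the map size divides by the scale. The sampling
        // layer itself has no kernels or bias, hence nothing to train (see
        // updateParameters(), which skips SAMPLING layers).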

        /**
         ***********************
         * Train the cnn.
         ***********************
         */
        public void train(Dataset paraDataset, int paraRounds) {
            for (int t = 0; t < paraRounds; t++) {
                System.out.println("Iteration: " + t);
                // Actually the number of mini-batches per round.
                int tempNumEpochs = paraDataset.size() / batchSize;
                if (paraDataset.size() % batchSize != 0)
                    tempNumEpochs++;
                // logger.info("Iteration {}, epochsNum: {}", t, epochsNum);

                double tempNumCorrect = 0;
                int tempCount = 0;
                for (int i = 0; i < tempNumEpochs; i++) {
                    int[] tempRandomPerm = MathUtils.randomPerm(paraDataset.size(), batchSize);
                    CnnLayer.prepareForNewBatch();

                    for (int index : tempRandomPerm) {
                        boolean isRight = train(paraDataset.getInstance(index));
                        if (isRight)
                            tempNumCorrect++;
                        tempCount++;
                        CnnLayer.prepareForNewRecord();
                    } // Of for index

                    updateParameters();
                    if (i % 50 == 0) {
                        System.out.print("..");
                        if (i + 50 > tempNumEpochs)
                            System.out.println();
                    } // Of if
                } // Of for i

                double p = 1.0 * tempNumCorrect / tempCount;
                if (t % 10 == 1 && p > 0.96) {
                    ALPHA = 0.001 + ALPHA * 0.9;
                    // logger.info("Set alpha = {}", ALPHA);
                } // Of if
                System.out.println("Training precision: " + p);
                // logger.info("Precision: {}/{}={}.", right, count, p);
            } // Of for t
        }// Of train
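
        // Note: one "iteration" draws tempNumEpochs random mini-batches with
        // randomPerm() instead of partitioning the dataset into strict epochs,
        // and parameters are updated once per mini-batch. At rounds where
        // t % 10 == 1, ALPHA is shrunk geometrically once the training
        // precision exceeds 0.96.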

        /**
         ***********************
         * Train the cnn with only one record.
         *
         * @param paraRecord
         *            The given record.
         ***********************
         */
        private boolean train(Instance paraRecord) {
            forward(paraRecord);
            boolean result = backPropagation(paraRecord);
            return result;
        }// Of train

        /**
         ***********************
         * Back-propagation.
         *
         * @param paraRecord
         *            The given record.
         ***********************
         */
        private boolean backPropagation(Instance paraRecord) {
            boolean result = setOutputLayerErrors(paraRecord);
            setHiddenLayerErrors();
            return result;
        }// Of backPropagation
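
        // Note: backPropagation() only computes and stores the error maps of
        // each layer; the actual gradient step is applied later, once per
        // mini-batch, by updateParameters().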

        /**
         ***********************
         * Update parameters.
         ***********************
         */
        private void updateParameters() {
            for (int l = 1; l < layerBuilder.getNumLayers(); l++) {
                CnnLayer layer = layerBuilder.getLayer(l);
                CnnLayer lastLayer = layerBuilder.getLayer(l - 1);
                switch (layer.getType()) {
                case CONVOLUTION:
                case OUTPUT:
                    updateKernels(layer, lastLayer);
                    updateBias(layer, lastLayer);
                    break;
                default:
                    break;
                }// Of switch
            } // Of for l
        }// Of updateParameters

        /**
         ***********************
         * Update bias.
         ***********************
         */
        private void updateBias(final CnnLayer paraLayer, CnnLayer paraLastLayer) {
            final double[][][][] errors = paraLayer.getErrors();

            // Attention: getOutMapNum() may not be correct.
            for (int j = 0; j < paraLayer.getOutMapNum(); j++) {
                double[][] error = MathUtils.sum(errors, j);
                double deltaBias = MathUtils.sum(error) / batchSize;
                double bias = paraLayer.getBias(j) + ALPHA * deltaBias;
                paraLayer.setBias(j, bias);
            } // Of for j
        }// Of updateBias
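
        // In formula form: bias_j <- bias_j + ALPHA * sum(errors of map j) / batchSize.
        // The update adds rather than subtracts because the stored errors
        // already point in the (target - output) direction.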

        /**
         ***********************
         * Update kernels.
         ***********************
         */
        private void updateKernels(final CnnLayer paraLayer, final CnnLayer paraLastLayer) {
            int tempLastMapNum = paraLastLayer.getOutMapNum();

            // Attention: getOutMapNum() may not be right.
            for (int j = 0; j < paraLayer.getOutMapNum(); j++) {
                for (int i = 0; i < tempLastMapNum; i++) {
                    double[][] tempDeltaKernel = null;
                    for (int r = 0; r < batchSize; r++) {
                        double[][] error = paraLayer.getError(r, j);
                        if (tempDeltaKernel == null) {
                            tempDeltaKernel = MathUtils.convnValid(paraLastLayer.getMap(r, i), error);
                        } else {
                            tempDeltaKernel = MathUtils.matrixOp(
                                    MathUtils.convnValid(paraLastLayer.getMap(r, i), error),
                                    tempDeltaKernel, null, null, MathUtils.plus);
                        } // Of if
                    } // Of for r

                    tempDeltaKernel = MathUtils.matrixOp(tempDeltaKernel, divideBatchSize);
                    if (!rangeCheck(tempDeltaKernel, -10, 10)) {
                        System.exit(0);
                    } // Of if

                    double[][] kernel = paraLayer.getKernel(i, j);
                    tempDeltaKernel = MathUtils.matrixOp(kernel, tempDeltaKernel, multiplyLambda,
                            multiplyAlpha, MathUtils.plus);
                    paraLayer.setKernel(i, j, tempDeltaKernel);
                } // Of for i
            } // Of for j
        }// Of updateKernels
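
        // In formula form, the kernel update above is
        //   kernel_ij <- kernel_ij * (1 - LAMBDA * ALPHA)
        //                + ALPHA / batchSize * sum_r convnValid(map_ri, error_rj),
        // i.e., weight decay plus the batch-averaged gradient; the gradient of
        // a valid convolution with respect to its kernel is again a valid
        // convolution of the input map with the error map.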

        /**
         ***********************
         * Set errors of all hidden layers.
         ***********************
         */
        private void setHiddenLayerErrors() {
            for (int l = layerBuilder.getNumLayers() - 2; l > 0; l--) {
                CnnLayer layer = layerBuilder.getLayer(l);
                CnnLayer nextLayer = layerBuilder.getLayer(l + 1);
                switch (layer.getType()) {
                case SAMPLING:
                    setSamplingErrors(layer, nextLayer);
                    break;
                case CONVOLUTION:
                    setConvolutionErrors(layer, nextLayer);
                    break;
                default:
                    break;
                }// Of switch
            } // Of for l
        }// Of setHiddenLayerErrors
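
        // Note: the loop starts at numLayers - 2 because the last layer's
        // errors are produced by setOutputLayerErrors(), and it stops before
        // layer 0 because the input layer needs no error maps.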

        /**
         ***********************
         * Set errors of a sampling layer.
         ***********************
         */
        private void setSamplingErrors(final CnnLayer paraLayer, final CnnLayer paraNextLayer) {
            int tempNextMapNum = paraNextLayer.getOutMapNum();

            // Attention: getOutMapNum() may not be correct.
            for (int i = 0; i < paraLayer.getOutMapNum(); i++) {
                double[][] sum = null;
                for (int j = 0; j < tempNextMapNum; j++) {
                    double[][] nextError = paraNextLayer.getError(j);
                    double[][] kernel = paraNextLayer.getKernel(i, j);
                    if (sum == null) {
                        sum = MathUtils.convnFull(nextError, MathUtils.rot180(kernel));
                    } else {
                        sum = MathUtils.matrixOp(
                                MathUtils.convnFull(nextError, MathUtils.rot180(kernel)), sum, null,
                                null, MathUtils.plus);
                    } // Of if
                } // Of for j
                paraLayer.setError(i, sum);

                if (!rangeCheck(sum, -2, 2)) {
                    System.out.println(
                            "setSamplingErrors, error out of range.\r\n" + Arrays.deepToString(sum));
                } // Of if
            } // Of for i
        }// Of setSamplingErrors
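
        // Note: convnFull(nextError, rot180(kernel)) is the standard identity
        // for back-propagating through a valid convolution: the full
        // convolution with the 180-degree rotated kernel spreads each error
        // back over the input positions that produced it. No derivative factor
        // appears here because sampling layers have no activation function.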

        /**
         ***********************
         * Set errors of a convolution layer.
         ***********************
         */
        private void setConvolutionErrors(final CnnLayer paraLayer, final CnnLayer paraNextLayer) {
            for (int m = 0; m < paraLayer.getOutMapNum(); m++) {
                Size tempScale = paraNextLayer.getScaleSize();
                double[][] tempNextLayerErrors = paraNextLayer.getError(m);
                double[][] tempMap = paraLayer.getMap(m);

                // out * (1 - out), the derivative of the sigmoid.
                double[][] tempOutMatrix = MathUtils.matrixOp(tempMap, MathUtils.cloneMatrix(tempMap),
                        null, MathUtils.one_value, MathUtils.multiply);
                // Multiply by the up-sampled (Kronecker-expanded) errors of the next layer.
                tempOutMatrix = MathUtils.matrixOp(tempOutMatrix,
                        MathUtils.kronecker(tempNextLayerErrors, tempScale), null, null,
                        MathUtils.multiply);
                paraLayer.setError(m, tempOutMatrix);

                if (!rangeCheck(tempNextLayerErrors, -10, 10)) {
                    System.out.println("setConvolutionErrors, nextError out of range:\r\n"
                            + Arrays.deepToString(tempNextLayerErrors));
                    System.out.println("the new errors are:\r\n" + Arrays.deepToString(tempOutMatrix));
                    System.exit(0);
                } // Of if

                if (!rangeCheck(tempOutMatrix, -10, 10)) {
                    System.out.println("setConvolutionErrors, error out of range.");
                    System.exit(0);
                } // Of if
            } // Of for m
        }// Of setConvolutionErrors
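
        // In formula form: error_m = out_m .* (1 - out_m)
        //                            .* kronecker(nextError_m, scaleSize),
        // the element-wise sigmoid derivative times the next (sampling)
        // layer's errors up-sampled back to this layer's map size.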

        /**
         ***********************
         * Set errors of the output layer.
         *
         * @param paraRecord
         *            The given record.
         ***********************
         */
        private boolean setOutputLayerErrors(Instance paraRecord) {
            CnnLayer tempOutputLayer = layerBuilder.getOutputLayer();
            int tempMapNum = tempOutputLayer.getOutMapNum();

            double[] tempTarget = new double[tempMapNum];
            double[] tempOutMaps = new double[tempMapNum];
            for (int m = 0; m < tempMapNum; m++) {
                double[][] outmap = tempOutputLayer.getMap(m);
                tempOutMaps[m] = outmap[0][0];
            } // Of for m

            int tempLabel = paraRecord.getLabel().intValue();
            tempTarget[tempLabel] = 1;

            for (int m = 0; m < tempMapNum; m++) {
                tempOutputLayer.setError(m, 0, 0,
                        tempOutMaps[m] * (1 - tempOutMaps[m]) * (tempTarget[m] - tempOutMaps[m]));
            } // Of for m

            return tempLabel == MathUtils.getMaxIndex(tempOutMaps);
        }// Of setOutputLayerErrors
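
        // This is the delta rule for a sigmoid output with squared error:
        //   error_m = out_m * (1 - out_m) * (target_m - out_m),
        // where target is the one-hot encoding of the label. The return value
        // reports whether the prediction (the argmax of the outputs) is
        // correct.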

        /**
         ***********************
         * Setup the network, with the batch size given as a parameter.
         ***********************
         */
        public void setup(int paraBatchSize) {
            CnnLayer tempInputLayer = layerBuilder.getLayer(0);
            tempInputLayer.initOutMaps(paraBatchSize);

            for (int i = 1; i < layerBuilder.getNumLayers(); i++) {
                CnnLayer tempLayer = layerBuilder.getLayer(i);
                CnnLayer tempLastLayer = layerBuilder.getLayer(i - 1);
                int tempLastMapNum = tempLastLayer.getOutMapNum();
                switch (tempLayer.getType()) {
                case INPUT:
                    break;
                case CONVOLUTION:
                    tempLayer.setMapSize(
                            tempLastLayer.getMapSize().subtract(tempLayer.getKernelSize(), 1));
                    tempLayer.initKernel(tempLastMapNum);
                    tempLayer.initBias();
                    tempLayer.initErrors(paraBatchSize);
                    tempLayer.initOutMaps(paraBatchSize);
                    break;
                case SAMPLING:
                    tempLayer.setOutMapNum(tempLastMapNum);
                    tempLayer.setMapSize(tempLastLayer.getMapSize().divide(tempLayer.getScaleSize()));
                    tempLayer.initErrors(paraBatchSize);
                    tempLayer.initOutMaps(paraBatchSize);
                    break;
                case OUTPUT:
                    tempLayer.initOutputKernel(tempLastMapNum, tempLastLayer.getMapSize());
                    tempLayer.initBias();
                    tempLayer.initErrors(paraBatchSize);
                    tempLayer.initOutMaps(paraBatchSize);
                    break;
                }// Of switch
            } // Of for i
        }// Of setup

        /**
         ***********************
         * Predict for the dataset.
         ***********************
         */
        public int[] predict(Dataset paraDataset) {
            System.out.println("Predicting ... ");
            CnnLayer.prepareForNewBatch();

            int[] resultPredictions = new int[paraDataset.size()];
            double tempCorrect = 0.0;
            Instance tempRecord;
            for (int i = 0; i < paraDataset.size(); i++) {
                tempRecord = paraDataset.getInstance(i);
                forward(tempRecord);
                CnnLayer outputLayer = layerBuilder.getOutputLayer();
                int tempMapNum = outputLayer.getOutMapNum();

                double[] tempOut = new double[tempMapNum];
                for (int m = 0; m < tempMapNum; m++) {
                    double[][] outmap = outputLayer.getMap(m);
                    tempOut[m] = outmap[0][0];
                } // Of for m

                resultPredictions[i] = MathUtils.getMaxIndex(tempOut);
                if (resultPredictions[i] == tempRecord.getLabel().intValue()) {
                    tempCorrect++;
                } // Of if
            } // Of for i

            System.out.println("Accuracy: " + tempCorrect / paraDataset.size());
            return resultPredictions;
        }// Of predict

        /**
         ***********************
         * Range check, only for debugging.
         *
         * @param paraMatrix
         *            The given matrix.
         * @param paraLowerBound
         *            The lower bound.
         * @param paraUpperBound
         *            The upper bound.
         ***********************
         */
        public boolean rangeCheck(double[][] paraMatrix, double paraLowerBound, double paraUpperBound) {
            for (int i = 0; i < paraMatrix.length; i++) {
                for (int j = 0; j < paraMatrix[0].length; j++) {
                    if ((paraMatrix[i][j] < paraLowerBound) || (paraMatrix[i][j] > paraUpperBound)) {
                        System.out.println("" + paraMatrix[i][j] + " out of range (" + paraLowerBound
                                + ", " + paraUpperBound + ")\r\n");
                        return false;
                    } // Of if
                } // Of for j
            } // Of for i
            return true;
        }// Of rangeCheck

        /**
         ***********************
         * The main entrance.
         ***********************
         */
        public static void main(String[] args) {
            LayerBuilder builder = new LayerBuilder();
            // Input layer, the maps are 28*28.
            builder.addLayer(new CnnLayer(LayerTypeEnum.INPUT, -1, new Size(28, 28)));
            // Convolution output has size 24*24: 24 = 28 - 5 + 1.
            builder.addLayer(new CnnLayer(LayerTypeEnum.CONVOLUTION, 6, new Size(5, 5)));
            // Sampling output has size 12*12: 12 = 24 / 2.
            builder.addLayer(new CnnLayer(LayerTypeEnum.SAMPLING, -1, new Size(2, 2)));
            // Convolution output has size 8*8: 8 = 12 - 5 + 1.
            builder.addLayer(new CnnLayer(LayerTypeEnum.CONVOLUTION, 12, new Size(5, 5)));
            // Sampling output has size 4*4: 4 = 8 / 2.
            builder.addLayer(new CnnLayer(LayerTypeEnum.SAMPLING, -1, new Size(2, 2)));
            // Output layer, digits 0 - 9.
            builder.addLayer(new CnnLayer(LayerTypeEnum.OUTPUT, 10, null));

            // Construct the full CNN.
            FullCnn tempCnn = new FullCnn(builder, 10);

            Dataset tempTrainingSet = new Dataset(
                    "C:\\Users\\86183\\IdeaProjects\\deepLearning\\src\\main\\java\\resources\\train.format",
                    ",", 784);

            // Train the model.
            tempCnn.train(tempTrainingSet, 10);
            // tempCnn.predict(tempTrainingSet);
        }// Of main
    }// Of class FullCnn
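
To sanity-check the map sizes noted in the comments of main(), the chain 28 -> 24 -> 12 -> 8 -> 4 -> 1 follows from the two rules applied in setup(): a valid convolution with a k*k kernel maps n to n - k + 1, and a sampling layer with an s*s scale maps n to n / s. Below is a minimal standalone sketch of that arithmetic; the class SizeChainCheck is hypothetical and not part of the dl package.

    public class SizeChainCheck {
        public static void main(String[] args) {
            int tempSize = 28; // The input maps are 28*28.
            tempSize = tempSize - 5 + 1; // Convolution with a 5*5 kernel.
            System.out.println("After convolution 1: " + tempSize); // 24
            tempSize = tempSize / 2; // Sampling with a 2*2 scale.
            System.out.println("After sampling 1: " + tempSize); // 12
            tempSize = tempSize - 5 + 1; // Convolution with a 5*5 kernel.
            System.out.println("After convolution 2: " + tempSize); // 8
            tempSize = tempSize / 2; // Sampling with a 2*2 scale.
            System.out.println("After sampling 2: " + tempSize); // 4
            // The output layer's kernels are 4*4, so each output map is 1*1.
        }// Of main
    }// Of class SizeChainCheck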

    Result: (training output screenshot in the original post)

  • Original article: https://blog.csdn.net/Chunghyyn/article/details/132572110