Fundamentals of AI Programming (Part 8)

This article shows how to use indexing and slicing in TensorFlow to access and manipulate tensor elements, covering basic indexing, NumPy-style indexing, basic slicing, multi-dimensional slicing, and strided (step) slicing across dimensions.


TensorFlow supports index-based access and slicing in a NumPy-like style; see the code below and the comments in it for details.

# coding: utf-8
# @Time   : 2022/3/26 14:58
# @Author : 那美那美 v: qwentest123
import tensorflow as tf
from datetime import datetime
def pprint(*args, **kwargs):
    """Print a timestamp, the given arguments, and a line of asterisks as a separator."""
    print(datetime.now(), *args, **kwargs, end='\n' + '*' * 50 + '\n')
print('# Basic indexing')
a = tf.ones([1, 5, 5, 3])  # [batch=1, height=5, width=5, channels=3]
pprint(a)  # shape (1, 5, 5, 3); elements can be accessed as tensor[index][index][index]...
pprint(a[0][0])  # shape (5, 3)
pprint(a[0][0][0])  # shape (3,)
pprint(a[0][0][0][2])  # shape (), a scalar: the third channel value
#**************************************************
print('# NumPy-style indexing')
a = tf.random.normal([4, 28, 28, 3])  # [batch=4, height=28, width=28, channels=3]
pprint(a)
pprint(a[0].shape)     # (28, 28, 3), indices consume dimensions from the outermost inward
pprint(a[0, 2].shape)  # (28, 3)
print(a[0, 2, 3].shape)     # (3,)
print(a[0, 1, 2, 1].shape)  # (), no dimensions left because this is a scalar
#**************************************************
print('# Basic slicing')
a = tf.range(10)
pprint(a[-1:])  # the last element
pprint(a[:2])   # includes the start index, excludes the end (left-closed, right-open)
#**************************************************
print('# Multi-dimensional slicing')
a = tf.random.normal([4, 28, 28, 3])
pprint(a[0, :, :, :].shape)  # (28, 28, 3): the first sample
pprint(a[0, 1, :, :].shape)  # (28, 3)
pprint(a[:, :, :, 0].shape)  # (4, 28, 28): keep every outer dimension, take only channel 0 of the innermost
pprint(a[:, :, :, 2].shape)  # (4, 28, 28)
pprint(a[:, 0, :, :].shape)  # (4, 28, 3)
#**************************************************
print('# Strided slicing across dimensions')
pprint(a[0:2, :, :, :].shape)  # (2, 28, 28, 3): take 2 samples along the outermost dimension
pprint(a[:, 0:28:2, 0:28:2, :].shape)  # (4, 14, 14, 3): a step of 2 on the two middle dimensions halves their size
pprint(a[:, 14:, 14:, :].shape)  # (4, 14, 14, 3): [14:] keeps everything from index 14 onward, i.e. 14 elements
pprint(a[1:, 14:, 14:, :].shape)  # (3, 14, 14, 3): only 3 samples remain along the outermost dimension
#**************************************************
print('# Ellipsis (...) to skip dimensions')
pprint(a[0, ...].shape)     # (28, 28, 3)
pprint(a[..., 0].shape)     # (4, 28, 28): channel 0, taken across all outer dimensions
pprint(a[0, ..., 2].shape)  # (28, 28): a[0] is (28, 28, 3); taking channel 2 then leaves (28, 28)

Output:

# Basic indexing
2022-03-26 15:45:57.517134: I tensorflow/core/platform/cpu_feature_guard.cc:151] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX AVX2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2022-03-26 15:45:58.882353: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1525] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 3497 MB memory:  -> device: 0, name: NVIDIA GeForce RTX 3060 Laptop GPU, pci bus id: 0000:01:00.0, compute capability: 8.6
2022-03-26 15:45:59.041282 tf.Tensor(
[[[[1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]]

  [[1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]]

  [[1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]]

  [[1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]]

  [[1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]
   [1. 1. 1.]]]], shape=(1, 5, 5, 3), dtype=float32)
**************************************************
2022-03-26 15:45:59.049286 tf.Tensor(
[[1. 1. 1.]
 [1. 1. 1.]
 [1. 1. 1.]
 [1. 1. 1.]
 [1. 1. 1.]], shape=(5, 3), dtype=float32)
**************************************************
2022-03-26 15:45:59.051275 tf.Tensor([1. 1. 1.], shape=(3,), dtype=float32)
**************************************************
2022-03-26 15:45:59.053287 tf.Tensor(1.0, shape=(), dtype=float32)
**************************************************
# NumPy-style indexing
2022-03-26 15:45:59.060276 tf.Tensor(
[[[[-1.30680159e-01 -1.01863611e+00  4.75848407e-01]
   [-6.78366721e-01  1.48825848e+00  4.44680452e-01]
   [ 3.24972481e-01 -1.16034091e+00  1.69006741e+00]
   ...
   [-2.14387909e-01  2.66888690e+00 -1.50309348e+00]
   [ 7.23290503e-01 -6.67086244e-02  6.46718323e-01]
   [-1.19195199e+00  8.48199010e-01 -1.46268249e-01]]

  [[ 6.01237774e-01  8.35252941e-01 -6.52508974e-01]
   [-1.31495202e+00 -8.16744864e-01 -3.58833760e-01]
   [ 2.33520344e-01 -2.17982364e+00  1.37097406e+00]
   ...
   [ 1.95766532e+00 -6.39507920e-02 -1.93403459e+00]
   [-9.89666581e-02 -1.62199402e+00  9.35288966e-01]
   [-2.17107952e-01  4.47697878e-01 -9.19387877e-01]]

  [[ 1.39820409e+00 -7.13251114e-01 -2.04785407e-01]
   [ 2.09407896e-01 -1.64472544e+00  3.07895064e-01]
   [-6.56968057e-02  1.42071486e+00  1.44199997e-01]
   ...
   [-6.64576709e-01 -9.43112493e-01 -6.02046192e-01]
   [ 8.16091239e-01 -9.94130731e-01  1.00042212e+00]
   [ 1.15573215e+00 -5.10190129e-01  1.86875954e-01]]

  ...

  [[-6.19262636e-01 -8.40132475e-01  4.07314569e-01]
   [-5.08638322e-01 -6.58391118e-01 -3.68193775e-01]
   [-4.14637268e-01 -1.16865671e+00  1.02708173e+00]
   ...
   [-1.25140429e+00 -3.69913608e-01  4.36387181e-01]
   [-1.90863144e+00  1.51600349e+00  8.86991858e-01]
   [-3.91400397e-01 -1.72628510e+00 -4.21762943e-01]]

  [[-1.72937755e-04 -1.38329434e+00 -1.22515655e+00]
   [ 7.39709079e-01 -8.24310541e-01 -3.76223534e-01]
   [-9.47761536e-01 -1.59052932e+00 -7.27479875e-01]
   ...
   [-8.37630510e-01 -3.03390771e-01 -8.09681509e-03]
   [ 3.43363553e-01 -1.38703334e+00 -1.29916513e+00]
   [ 6.86376169e-02  1.08932066e+00 -5.10741293e-01]]

  [[ 2.35234785e+00  1.35932076e+00 -1.16747177e+00]
   [ 7.26505339e-01  5.94435871e-01 -4.13747638e-01]
   [-9.78729606e-01  9.52886283e-01 -4.64297026e-01]
   ...
   [ 5.35715640e-01  3.85097593e-01 -4.59191024e-01]
   [-4.64222521e-01 -2.94075179e+00 -9.89401281e-01]
   [-1.35721362e+00 -7.27195561e-01  9.51134026e-01]]]


 [[[ 7.31981993e-01  1.24837291e+00 -2.85771966e+00]
   [ 6.72764182e-02  6.89042091e-01  3.53663683e-01]
   [-3.84308010e-01 -3.23957533e-01 -2.60260195e-01]
   ...
   [ 1.39741182e+00  1.37750065e+00 -1.82982162e-01]
   [-2.54990548e-01  2.72116303e-01 -5.15931964e-01]
   [ 8.82133543e-01  7.85762742e-02  7.53871679e-01]]

  [[-1.18782151e+00 -3.40703309e-01 -9.11638439e-01]
   [-1.13980675e+00 -8.47539604e-01 -1.36104310e+00]
   [-8.94776762e-01 -5.34510732e-01  2.60276246e+00]
   ...
   [ 2.93496937e-01  7.71946013e-01  1.22609071e-01]
   [-8.32211614e-01  1.25817692e+00  1.41016078e+00]
   [-1.25119817e+00  4.64457601e-01 -4.44131821e-01]]

  [[ 3.44704658e-01 -9.28567410e-01 -9.49506238e-02]
   [ 1.33511317e+00  8.38591397e-01  6.63774252e-01]
   [ 1.41794384e-01 -1.67777836e+00 -1.13119340e+00]
   ...
   [-1.22153056e+00  8.87443662e-01  7.87536055e-02]
   [-2.19494581e+00 -7.37355113e-01  2.13345623e+00]
   [-8.75866652e-01 -3.39595765e-01  7.11260960e-02]]

  ...

  [[-1.00813425e+00  2.42697865e-01 -5.12792528e-01]
   [ 7.64714658e-01 -1.50156164e+00  1.47253752e-01]
   [ 1.77670801e+00  6.40561640e-01 -1.66489333e-01]
   ...
   [ 4.43284631e-01 -2.19472432e+00 -2.18999195e+00]
   [ 7.80576095e-02 -6.74637139e-01  3.49140167e-02]
   [-9.57691789e-01 -2.03180298e-01 -2.14587599e-01]]

  [[ 3.73735696e-01 -7.16442049e-01  8.44266564e-02]
   [ 6.06565714e-01  1.68852770e+00  2.43373603e-01]
   [-1.00145459e-01  9.77356732e-01 -1.69337380e+00]
   ...
   [-1.46459711e+00  3.28806311e-01 -3.74825090e-01]
   [ 5.98105371e-01  1.66807818e+00  1.21285796e+00]
   [ 1.04382181e+00 -2.36471027e-01 -8.31134319e-01]]

  [[ 1.23682928e+00 -4.13765199e-02  1.35704637e+00]
   [-2.13446927e+00  1.04637660e-01 -1.29045457e-01]
   [-2.00253320e+00  2.46626139e-01  1.48442864e+00]
   ...
   [-1.26547492e+00  7.32864380e-01 -3.05279613e-01]
   [-5.69633208e-02  1.06303549e+00  6.43325865e-01]
   [ 1.48971879e+00  5.18749118e-01 -1.89363551e+00]]]


 [[[ 5.00895321e-01 -5.01940191e-01  8.55227828e-01]
   [ 1.85303605e+00 -4.80809540e-01 -5.91060638e-01]
   [-3.13258320e-01  1.08818613e-01 -8.52218211e-01]
   ...
   [ 1.31850529e+00  2.41992146e-01 -6.21650398e-01]
   [ 1.38241565e+00  5.52040815e-01 -4.63320225e-01]
   [-8.96987438e-01  1.08059382e+00 -9.19498146e-01]]

  [[ 1.59865391e+00 -9.05260682e-01  1.10672794e-01]
   [ 9.82701898e-01 -4.51007456e-01  1.63281083e+00]
   [ 4.08629775e-02  1.11151494e-01  1.54181266e+00]
   ...
   [ 2.85049438e-01 -1.65663040e+00  5.10219671e-02]
   [-1.33044541e+00  1.08506107e+00 -3.96539927e-01]
   [-9.97954726e-01 -1.37387002e+00  2.01901942e-01]]

  [[-9.27011967e-01 -6.83104694e-01 -5.29794574e-01]
   [ 1.60543367e-01  6.38479233e-01  5.32185197e-01]
   [-4.96778250e-01 -6.22256875e-01 -3.04465652e-01]
   ...
   [-7.38721967e-01  7.02534020e-01  1.26042259e+00]
   [-2.09313989e+00  6.58707559e-01  5.46983004e-01]
   [ 1.24035561e+00 -1.38115776e+00  1.28890872e+00]]

  ...

  [[-1.56000626e+00 -4.68812674e-01 -6.13434255e-01]
   [-1.16495514e+00 -7.56822899e-02 -2.71274820e-02]
   [-1.09618843e+00  2.54081845e-01  1.26724577e+00]
   ...
   [-3.26130152e-01 -3.15856010e-01  4.70309794e-01]
   [-4.07581300e-01  4.59568501e-01 -4.35880758e-02]
   [-2.36655489e-01 -1.89829826e+00 -6.77113235e-01]]

  [[ 4.83973116e-01 -8.97059143e-01 -4.39148657e-02]
   [ 1.22302437e+00  8.18540454e-01 -2.52303958e-01]
   [ 5.98370731e-01 -5.59830070e-02 -1.06863260e+00]
   ...
   [-3.58597964e-01  6.90848410e-01 -1.96414971e+00]
   [ 1.27100754e+00 -1.06780134e-01  5.63212395e-01]
   [ 1.05877829e+00  3.77991498e-01  6.90370739e-01]]

  [[-4.08594280e-01  2.45937873e-02 -1.79675627e+00]
   [-1.34273994e+00 -1.81957483e+00  8.96802723e-01]
   [-2.24822235e+00  1.12677169e+00 -1.14819753e+00]
   ...
   [-8.82175744e-01  2.21573010e-01 -2.29014421e+00]
   [ 3.83693129e-01  2.28983617e+00 -6.23628855e-01]
   [-2.48336401e-02 -6.90657735e-01  1.48447052e-01]]]


 [[[-1.45130825e+00 -1.44105971e-01 -1.04453579e-01]
   [-5.51139414e-01  6.82088912e-01  4.01337653e-01]
   [ 1.31013542e-01  6.52241051e-01  4.29635346e-01]
   ...
   [-1.21092892e+00  9.78002846e-01 -9.57988143e-01]
   [-1.22533548e+00 -3.34495068e-01 -2.98309624e-01]
   [ 6.24448014e-03  2.44012788e-01  4.74973843e-02]]

  [[ 3.14173102e-01 -5.78815937e-01  4.56285805e-01]
   [-8.10654700e-01 -9.98128176e-01 -8.54979232e-02]
   [-6.87356591e-01 -3.35537404e-01 -1.74845171e+00]
   ...
   [-2.13581419e+00  7.62590468e-01  4.72389795e-02]
   [-2.65116662e-01  1.54433918e+00  3.59028429e-01]
   [-1.60090968e-01  4.33500350e-01  8.44461679e-01]]

  [[-7.56713077e-02  5.36138296e-01  1.24713862e+00]
   [-2.03282446e-01 -1.30538571e+00 -2.91983128e-01]
   [ 6.63685203e-01  6.74082488e-02 -9.23025191e-01]
   ...
   [-4.33401108e-01  8.24725270e-01  9.16847661e-02]
   [-9.17182148e-01 -2.40052957e-02 -7.20979631e-01]
   [-5.57259858e-01 -1.16178143e+00 -8.01396072e-01]]

  ...

  [[ 1.14524543e+00 -1.62447691e-01 -2.80867636e-01]
   [-3.46600950e-01 -4.44777995e-01  1.69094706e+00]
   [ 7.66932964e-01 -4.47134256e-01  1.17231035e+00]
   ...
   [ 5.82169518e-02  2.68486238e+00  2.84807146e-01]
   [-4.42855328e-01  1.72797084e-01  5.72124481e-01]
   [-4.04444009e-01 -7.82767981e-02 -1.20414495e+00]]

  [[ 1.11356401e+00  1.44590706e-01 -7.74609447e-01]
   [-1.43489271e-01 -4.38937545e-01 -2.88151473e-01]
   [ 1.11335135e+00  1.20081892e-03  7.94602275e-01]
   ...
   [ 8.86751592e-01 -9.27388728e-01 -3.57239693e-01]
   [-5.18646538e-01  3.07954615e-03  8.68428469e-01]
   [-2.76983604e-02 -4.04984593e-01  2.50524968e-01]]

  [[-1.22856426e+00  1.74282169e+00 -6.65673986e-02]
   [-1.40957677e+00  1.04646671e+00  1.98647767e-01]
   [-1.37095523e+00  8.72497380e-01 -9.03716326e-01]
   ...
   [-2.60202795e-01  3.47825021e-01 -4.27771389e-01]
   [ 1.50011516e+00  1.95287716e+00  1.25516981e-01]
   [-2.52160728e-01  6.35711670e-01 -2.10774255e+00]]]], shape=(4, 28, 28, 3), dtype=float32)
**************************************************
2022-03-26 15:45:59.068277 (28, 28, 3)
**************************************************
2022-03-26 15:45:59.069279 (28, 3)
**************************************************
(3,)
()
# Basic slicing
2022-03-26 15:45:59.072279 tf.Tensor([9], shape=(1,), dtype=int32)
**************************************************
2022-03-26 15:45:59.072279 tf.Tensor([0 1], shape=(2,), dtype=int32)
**************************************************
# Multi-dimensional slicing
2022-03-26 15:45:59.073289 (28, 28, 3)
**************************************************
2022-03-26 15:45:59.073289 (28, 3)
**************************************************
2022-03-26 15:45:59.073289 (4, 28, 28)
**************************************************
2022-03-26 15:45:59.074285 (4, 28, 28)
**************************************************
2022-03-26 15:45:59.074285 (4, 28, 3)
**************************************************
# Strided slicing across dimensions
2022-03-26 15:45:59.074285 (2, 28, 28, 3)
**************************************************
2022-03-26 15:45:59.075290 (4, 14, 14, 3)
**************************************************
2022-03-26 15:45:59.076292 (4, 14, 14, 3)
**************************************************
2022-03-26 15:45:59.076292 (3, 14, 14, 3)
**************************************************
# Ellipsis (...) to skip dimensions
2022-03-26 15:45:59.076292 (28, 28, 3)
**************************************************
2022-03-26 15:45:59.076292 (4, 28, 28)
**************************************************
2022-03-26 15:45:59.077284 (28, 28)
**************************************************
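Because the slicing syntax mirrors NumPy, the same expressions produce the same shapes on a tensor and on its NumPy counterpart. The minimal sketch below is an illustrative check, not part of the script above: the names t and n are placeholders of my own, and it assumes TensorFlow 2.x eager execution so that .numpy() is available.

import tensorflow as tf

t = tf.random.normal([4, 28, 28, 3])  # same layout as above: [batch, height, width, channels]
n = t.numpy()                         # the equivalent NumPy array (eager mode assumed)

# The same slice expressions yield the same shapes in both libraries.
print(t[0, ..., 2].shape, n[0, ..., 2].shape)                        # (28, 28) (28, 28)
print(t[:, 0:28:2, 0:28:2, :].shape, n[:, 0:28:2, 0:28:2, :].shape)  # (4, 14, 14, 3) (4, 14, 14, 3)
print(t[1:, 14:, 14:, :].shape, n[1:, 14:, 14:, :].shape)            # (3, 14, 14, 3) (3, 14, 14, 3)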
