Baselight

Nifty 50 Stocks Price Dataset

Nifty 50 Stocks Prices

@kaggle.tadakasuryateja_nifty_50_stocks

Loading...
Loading...

About this Dataset

Nifty 50 Stocks Price Dataset

A dataset containing prices of Nifty 50 stocks. It contains the open, high, low, close, and adjusted close prices, as well as the trading volume of each stock.
This dataset is intended for predicting future stock prices.
You may use linear regression or neural networks.

Tables

Itc

@kaggle.tadakasuryateja_nifty_50_stocks.itc
  • 191.89 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for ITC (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE itc (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Kotakbank

@kaggle.tadakasuryateja_nifty_50_stocks.kotakbank
  • 195.4 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for KOTAKBANK (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE kotakbank (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Lt

@kaggle.tadakasuryateja_nifty_50_stocks.lt
  • 210.19 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for LT (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE lt (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Lupin

@kaggle.tadakasuryateja_nifty_50_stocks.lupin
  • 191.58 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for LUPIN (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE lupin (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Mandm

@kaggle.tadakasuryateja_nifty_50_stocks.mandm
  • 251.69 kB
  • 5,827 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for M&M (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE mandm (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Maruti

@kaggle.tadakasuryateja_nifty_50_stocks.maruti
  • 207.96 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for MARUTI (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE maruti (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Mcdowell N

@kaggle.tadakasuryateja_nifty_50_stocks.mcdowell_n
  • 5.8 kB
  • 10 columns
Loading...
-- Daily price history for MCDOWELL-N (Nifty 50 constituent).
-- NOTE(review): unlike the sibling tables, every column here (including date,
-- prices, and volume) was ingested as VARCHAR — cast before numeric use.
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE mcdowell_n (
  "date" VARCHAR,
  "stock_name" VARCHAR,
  "n_1_open" VARCHAR,              -- 1. Open
  "n_2_high" VARCHAR,              -- 2. High
  "n_3_low" VARCHAR,               -- 3. Low
  "n_4_close" VARCHAR,             -- 4. Close
  "n_5_adjusted_close" VARCHAR,    -- 5. Adjusted Close
  "n_6_volume" VARCHAR,            -- 6. Volume
  "n_7_dividend_amount" VARCHAR,   -- 7. Dividend Amount
  "n_8_split_coefficient" VARCHAR  -- 8. Split Coefficient
);

Nestleind

@kaggle.tadakasuryateja_nifty_50_stocks.nestleind
  • 204.46 kB
  • 4,431 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for NESTLEIND (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE nestleind (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Ntpc

@kaggle.tadakasuryateja_nifty_50_stocks.ntpc
  • 171.2 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for NTPC (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE ntpc (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Ongc

@kaggle.tadakasuryateja_nifty_50_stocks.ongc
  • 193.15 kB
  • 4,431 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for ONGC (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE ongc (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Powergrid

@kaggle.tadakasuryateja_nifty_50_stocks.powergrid
  • 141.73 kB
  • 3,747 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for POWERGRID (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE powergrid (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Reliance

@kaggle.tadakasuryateja_nifty_50_stocks.reliance
  • 207.71 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for RELIANCE (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE reliance (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Sbin

@kaggle.tadakasuryateja_nifty_50_stocks.sbin
  • 199.44 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for SBIN (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE sbin (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Sunpharma

@kaggle.tadakasuryateja_nifty_50_stocks.sunpharma
  • 203.69 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for SUNPHARMA (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE sunpharma (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Tatamotors

@kaggle.tadakasuryateja_nifty_50_stocks.tatamotors
  • 220.69 kB
  • 4,433 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for TATAMOTORS (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE tatamotors (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Tatasteel

@kaggle.tadakasuryateja_nifty_50_stocks.tatasteel
  • 195.17 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for TATASTEEL (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE tatasteel (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Tcs

@kaggle.tadakasuryateja_nifty_50_stocks.tcs
  • 202.27 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for TCS (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE tcs (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Techm

@kaggle.tadakasuryateja_nifty_50_stocks.techm
  • 176.55 kB
  • 4,021 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for TECHM (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE techm (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Ultracemco

@kaggle.tadakasuryateja_nifty_50_stocks.ultracemco
  • 201.48 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for ULTRACEMCO (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE ultracemco (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Unionbank

@kaggle.tadakasuryateja_nifty_50_stocks.unionbank
  • 177.53 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for UNIONBANK (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE unionbank (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Vedl

@kaggle.tadakasuryateja_nifty_50_stocks.vedl
  • 192.71 kB
  • 4,430 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for VEDL (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE vedl (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Wipro

@kaggle.tadakasuryateja_nifty_50_stocks.wipro
  • 213.85 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for WIPRO (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE wipro (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Zeel

@kaggle.tadakasuryateja_nifty_50_stocks.zeel
  • 205.45 kB
  • 4,432 rows
  • 10 columns
Loading...
-- Daily OHLCV price history for ZEEL (Nifty 50 constituent).
-- Fix: commas were trapped inside the trailing `--` comments, leaving the
-- column definitions unseparated; commas now precede the comments.
CREATE TABLE zeel (
  "date" TIMESTAMP,
  "stock_name" VARCHAR,
  "n_1_open" DOUBLE,              -- 1. Open
  "n_2_high" DOUBLE,              -- 2. High
  "n_3_low" DOUBLE,               -- 3. Low
  "n_4_close" DOUBLE,             -- 4. Close
  "n_5_adjusted_close" DOUBLE,    -- 5. Adjusted Close
  "n_6_volume" BIGINT,            -- 6. Volume
  "n_7_dividend_amount" DOUBLE,   -- 7. Dividend Amount
  "n_8_split_coefficient" DOUBLE  -- 8. Split Coefficient
);

Share link

Anyone who has the link will be able to view this.